Diffstat (limited to 'spec')
-rw-r--r-- spec/bin/feature_flag_spec.rb | 13
-rw-r--r-- spec/channels/application_cable/connection_spec.rb | 52
-rw-r--r-- spec/controllers/admin/application_settings_controller_spec.rb | 38
-rw-r--r-- spec/controllers/admin/clusters_controller_spec.rb | 5
-rw-r--r-- spec/controllers/admin/integrations_controller_spec.rb | 14
-rw-r--r-- spec/controllers/admin/runners_controller_spec.rb | 17
-rw-r--r-- spec/controllers/admin/sessions_controller_spec.rb | 2
-rw-r--r-- spec/controllers/admin/users_controller_spec.rb | 6
-rw-r--r-- spec/controllers/application_controller_spec.rb | 4
-rw-r--r-- spec/controllers/boards/lists_controller_spec.rb | 11
-rw-r--r-- spec/controllers/concerns/controller_with_feature_category/config_spec.rb | 53
-rw-r--r-- spec/controllers/concerns/controller_with_feature_category_spec.rb | 38
-rw-r--r-- spec/controllers/concerns/redis_tracking_spec.rb | 14
-rw-r--r-- spec/controllers/dashboard_controller_spec.rb | 10
-rw-r--r-- spec/controllers/every_controller_spec.rb | 25
-rw-r--r-- spec/controllers/groups/clusters_controller_spec.rb | 5
-rw-r--r-- spec/controllers/groups/group_links_controller_spec.rb | 33
-rw-r--r-- spec/controllers/groups/labels_controller_spec.rb | 79
-rw-r--r-- spec/controllers/groups/milestones_controller_spec.rb | 7
-rw-r--r-- spec/controllers/groups/registry/repositories_controller_spec.rb | 2
-rw-r--r-- spec/controllers/groups/settings/ci_cd_controller_spec.rb | 21
-rw-r--r-- spec/controllers/groups_controller_spec.rb | 67
-rw-r--r-- spec/controllers/import/bulk_imports_controller_spec.rb | 137
-rw-r--r-- spec/controllers/import/manifest_controller_spec.rb | 99
-rw-r--r-- spec/controllers/invites_controller_spec.rb | 89
-rw-r--r-- spec/controllers/projects/clusters_controller_spec.rb | 6
-rw-r--r-- spec/controllers/projects/group_links_controller_spec.rb | 51
-rw-r--r-- spec/controllers/projects/import/jira_controller_spec.rb | 2
-rw-r--r-- spec/controllers/projects/incidents_controller_spec.rb | 111
-rw-r--r-- spec/controllers/projects/jobs_controller_spec.rb | 7
-rw-r--r-- spec/controllers/projects/labels_controller_spec.rb | 95
-rw-r--r-- spec/controllers/projects/milestones_controller_spec.rb | 4
-rw-r--r-- spec/controllers/projects/registry/tags_controller_spec.rb | 2
-rw-r--r-- spec/controllers/projects/releases/evidences_controller_spec.rb | 32
-rw-r--r-- spec/controllers/projects/runners_controller_spec.rb | 41
-rw-r--r-- spec/controllers/projects/serverless/functions_controller_spec.rb | 16
-rw-r--r-- spec/controllers/projects/settings/access_tokens_controller_spec.rb | 26
-rw-r--r-- spec/controllers/projects/settings/ci_cd_controller_spec.rb | 17
-rw-r--r-- spec/controllers/projects/static_site_editor_controller_spec.rb | 87
-rw-r--r-- spec/controllers/projects/tags_controller_spec.rb | 21
-rw-r--r-- spec/controllers/runner_setup_controller_spec.rb | 21
-rw-r--r-- spec/controllers/sessions_controller_spec.rb | 23
-rw-r--r-- spec/factories/alert_management/alerts.rb | 14
-rw-r--r-- spec/factories/alert_management/http_integrations.rb | 14
-rw-r--r-- spec/factories/alerting/alert.rb | 25
-rw-r--r-- spec/factories/authentication_event.rb | 11
-rw-r--r-- spec/factories/ci/build_pending_states.rb | 2
-rw-r--r-- spec/factories/ci/build_trace_chunks.rb | 13
-rw-r--r-- spec/factories/ci/pipelines.rb | 2
-rw-r--r-- spec/factories/design_management/designs.rb | 2
-rw-r--r-- spec/factories/events.rb | 11
-rw-r--r-- spec/factories/group_import_states.rb | 1
-rw-r--r-- spec/factories/instance_statistics/measurement.rb | 8
-rw-r--r-- spec/factories/issue_email_participants.rb | 8
-rw-r--r-- spec/factories/merge_request_diffs.rb | 8
-rw-r--r-- spec/factories/merge_requests.rb | 2
-rw-r--r-- spec/factories/namespaces.rb | 8
-rw-r--r-- spec/factories/packages.rb | 160
-rw-r--r-- spec/factories/packages/package_file.rb | 165
-rw-r--r-- spec/factories/pages_deployments.rb | 9
-rw-r--r-- spec/factories/project_tracing_settings.rb | 8
-rw-r--r-- spec/factories/projects.rb | 6
-rw-r--r-- spec/factories/prometheus_metrics.rb | 1
-rw-r--r-- spec/factories/services.rb | 6
-rw-r--r-- spec/factories/terraform/state.rb | 10
-rw-r--r-- spec/factories/terraform/state_version.rb | 8
-rw-r--r-- spec/factories/todos.rb | 4
-rw-r--r-- spec/factories/usage_data.rb | 14
-rw-r--r-- spec/factories/wiki_pages.rb | 3
-rw-r--r-- spec/factories/wikis.rb | 6
-rw-r--r-- spec/features/admin/admin_settings_spec.rb | 32
-rw-r--r-- spec/features/admin/admin_users_spec.rb | 22
-rw-r--r-- spec/features/admin/admin_uses_repository_checks_spec.rb | 2
-rw-r--r-- spec/features/admin/clusters/eks_spec.rb | 2
-rw-r--r-- spec/features/admin/dashboard_spec.rb | 6
-rw-r--r-- spec/features/boards/add_issues_modal_spec.rb | 4
-rw-r--r-- spec/features/boards/boards_spec.rb | 22
-rw-r--r-- spec/features/calendar_spec.rb | 2
-rw-r--r-- spec/features/clusters/cluster_detail_page_spec.rb | 12
-rw-r--r-- spec/features/commits_spec.rb | 1
-rw-r--r-- spec/features/dashboard/todos/todos_filtering_spec.rb | 8
-rw-r--r-- spec/features/dashboard/todos/todos_spec.rb | 15
-rw-r--r-- spec/features/expand_collapse_diffs_spec.rb | 1
-rw-r--r-- spec/features/groups/clusters/eks_spec.rb | 2
-rw-r--r-- spec/features/groups/clusters/user_spec.rb | 8
-rw-r--r-- spec/features/groups/members/manage_groups_spec.rb | 113
-rw-r--r-- spec/features/groups/navbar_spec.rb | 8
-rw-r--r-- spec/features/groups/packages_spec.rb | 2
-rw-r--r-- spec/features/groups/show_spec.rb | 13
-rw-r--r-- spec/features/incidents/incident_details_spec.rb | 32
-rw-r--r-- spec/features/invites_spec.rb | 45
-rw-r--r-- spec/features/issuables/close_reopen_report_toggle_spec.rb | 12
-rw-r--r-- spec/features/issuables/markdown_references/internal_references_spec.rb | 8
-rw-r--r-- spec/features/issuables/merge_request_discussion_lock_spec.rb | 79
-rw-r--r-- spec/features/issues/gfm_autocomplete_spec.rb | 24
-rw-r--r-- spec/features/issues/user_sees_live_update_spec.rb | 2
-rw-r--r-- spec/features/issues/user_views_issue_spec.rb | 4
-rw-r--r-- spec/features/merge_request/batch_comments_spec.rb | 15
-rw-r--r-- spec/features/merge_request/maintainer_edits_fork_spec.rb | 7
-rw-r--r-- spec/features/merge_request/user_comments_on_diff_spec.rb | 9
-rw-r--r-- spec/features/merge_request/user_edits_mr_spec.rb | 18
-rw-r--r-- spec/features/merge_request/user_expands_diff_spec.rb | 5
-rw-r--r-- spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb | 29
-rw-r--r-- spec/features/merge_request/user_reopens_merge_request_spec.rb | 6
-rw-r--r-- spec/features/merge_request/user_resolves_wip_mr_spec.rb | 4
-rw-r--r-- spec/features/merge_request/user_sees_diff_spec.rb | 3
-rw-r--r-- spec/features/merge_request/user_sees_page_metadata_spec.rb | 17
-rw-r--r-- spec/features/merge_request/user_sees_pipelines_spec.rb | 4
-rw-r--r-- spec/features/merge_request/user_sees_suggest_pipeline_spec.rb | 35
-rw-r--r-- spec/features/merge_request/user_suggests_changes_on_diff_spec.rb | 5
-rw-r--r-- spec/features/merge_request/user_views_open_merge_request_spec.rb | 19
-rw-r--r-- spec/features/milestones/user_views_milestone_spec.rb | 76
-rw-r--r-- spec/features/milestones/user_views_milestones_spec.rb | 3
-rw-r--r-- spec/features/operations_sidebar_link_spec.rb | 81
-rw-r--r-- spec/features/profiles/keys_spec.rb | 36
-rw-r--r-- spec/features/projects/activity/user_sees_design_comment_spec.rb | 2
-rw-r--r-- spec/features/projects/blobs/edit_spec.rb | 11
-rw-r--r-- spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb | 2
-rw-r--r-- spec/features/projects/branches_spec.rb | 4
-rw-r--r-- spec/features/projects/clusters/eks_spec.rb | 2
-rw-r--r-- spec/features/projects/clusters/gcp_spec.rb | 12
-rw-r--r-- spec/features/projects/clusters/user_spec.rb | 8
-rw-r--r-- spec/features/projects/clusters_spec.rb | 16
-rw-r--r-- spec/features/projects/commit/user_comments_on_commit_spec.rb | 48
-rw-r--r-- spec/features/projects/environments/environments_spec.rb | 4
-rw-r--r-- spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb | 29
-rw-r--r-- spec/features/projects/issues/viewing_relocated_issues_spec.rb | 40
-rw-r--r-- spec/features/projects/members/groups_with_access_list_spec.rb | 41
-rw-r--r-- spec/features/projects/pipelines/pipeline_spec.rb | 17
-rw-r--r-- spec/features/projects/releases/user_creates_release_spec.rb | 20
-rw-r--r-- spec/features/projects/releases/user_views_edit_release_spec.rb | 2
-rw-r--r-- spec/features/projects/releases/user_views_release_spec.rb | 14
-rw-r--r-- spec/features/projects/releases/user_views_releases_spec.rb | 4
-rw-r--r-- spec/features/projects/show/user_manages_notifications_spec.rb | 8
-rw-r--r-- spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb | 2
-rw-r--r-- spec/features/projects/tree/tree_show_spec.rb | 2
-rw-r--r-- spec/features/projects/user_sees_sidebar_spec.rb | 2
-rw-r--r-- spec/features/projects/wiki/markdown_preview_spec.rb | 168
-rw-r--r-- spec/features/projects/wiki/shortcuts_spec.rb | 20
-rw-r--r-- spec/features/projects/wiki/user_creates_wiki_page_spec.rb | 360
-rw-r--r-- spec/features/projects/wiki/user_deletes_wiki_page_spec.rb | 22
-rw-r--r-- spec/features/projects/wiki/user_updates_wiki_page_spec.rb | 263
-rw-r--r-- spec/features/projects/wiki/user_views_wiki_empty_spec.rb | 138
-rw-r--r-- spec/features/projects/wikis_spec.rb | 20
-rw-r--r-- spec/features/projects_spec.rb | 9
-rw-r--r-- spec/features/protected_branches_spec.rb | 14
-rw-r--r-- spec/features/search/user_searches_for_code_spec.rb | 1
-rw-r--r-- spec/features/search/user_uses_header_search_field_spec.rb | 26
-rw-r--r-- spec/features/static_site_editor_spec.rb | 67
-rw-r--r-- spec/features/tags/developer_deletes_tag_spec.rb | 27
-rw-r--r-- spec/features/task_lists_spec.rb | 115
-rw-r--r-- spec/features/triggers_spec.rb | 186
-rw-r--r-- spec/features/users/show_spec.rb | 10
-rw-r--r-- spec/features/users/terms_spec.rb | 15
-rw-r--r-- spec/finders/ci/pipelines_for_merge_request_finder_spec.rb | 2
-rw-r--r-- spec/finders/group_labels_finder_spec.rb | 42
-rw-r--r-- spec/finders/groups_finder_spec.rb | 56
-rw-r--r-- spec/finders/merge_requests/by_approvals_finder_spec.rb | 82
-rw-r--r-- spec/finders/packages/generic/package_finder_spec.rb | 31
-rw-r--r-- spec/fixtures/api/schemas/entities/group_group_link.json | 4
-rw-r--r-- spec/fixtures/api/schemas/entities/merge_request_basic.json | 1
-rw-r--r-- spec/fixtures/api/schemas/entities/test_case.json | 1
-rw-r--r-- spec/fixtures/api/schemas/entities/trigger.json | 39
-rw-r--r-- spec/fixtures/api/schemas/feature_flags_client_token.json | 10
-rw-r--r-- spec/fixtures/api/schemas/group_member.json | 13
-rw-r--r-- spec/fixtures/api/schemas/public_api/v4/packages/package.json | 3
-rw-r--r-- spec/fixtures/api/schemas/registry/repository.json | 3
-rw-r--r-- spec/fixtures/api/schemas/unleash/unleash.json | 20
-rw-r--r-- spec/fixtures/api/schemas/unleash/unleash_feature.json | 27
-rw-r--r-- spec/fixtures/api/schemas/unleash/unleash_strategy.json | 24
-rw-r--r-- spec/fixtures/invalid_manifest.xml | 4
-rw-r--r-- spec/fixtures/lib/backup/design_repo.bundle | Bin 0 -> 490 bytes
-rw-r--r-- spec/fixtures/lib/backup/project_repo.bundle | Bin 0 -> 387 bytes
-rw-r--r-- spec/fixtures/lib/backup/wiki_repo.bundle | Bin 0 -> 365 bytes
-rw-r--r-- spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb | 1
-rw-r--r-- spec/fixtures/packages/generic/myfile.tar.gz | Bin 0 -> 1149 bytes
-rw-r--r-- spec/frontend/alert_handler_spec.js | 65
-rw-r--r-- spec/frontend/alert_management/components/alert_details_spec.js | 98
-rw-r--r-- spec/frontend/alert_management/components/alert_management_table_spec.js | 30
-rw-r--r-- spec/frontend/alert_management/components/alert_summary_row_spec.js | 40
-rw-r--r-- spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js | 4
-rw-r--r-- spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js | 13
-rw-r--r-- spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js | 6
-rw-r--r-- spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap | 14
-rw-r--r-- spec/frontend/analytics/instance_statistics/components/app_spec.js | 24
-rw-r--r-- spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js | 54
-rw-r--r-- spec/frontend/analytics/instance_statistics/mock_data.js | 4
-rw-r--r-- spec/frontend/analytics/shared/components/metric_card_spec.js | 129
-rw-r--r-- spec/frontend/api_spec.js | 40
-rw-r--r-- spec/frontend/awards_handler_spec.js | 24
-rw-r--r-- spec/frontend/batch_comments/components/preview_item_spec.js | 16
-rw-r--r-- spec/frontend/batch_comments/components/publish_button_spec.js | 11
-rw-r--r-- spec/frontend/batch_comments/components/publish_dropdown_spec.js | 87
-rw-r--r-- spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js | 90
-rw-r--r-- spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js | 52
-rw-r--r-- spec/frontend/behaviors/load_startup_css_spec.js | 44
-rw-r--r-- spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap | 2
-rw-r--r-- spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js | 67
-rw-r--r-- spec/frontend/boards/board_blank_state_spec.js | 95
-rw-r--r-- spec/frontend/boards/boards_store_spec.js | 19
-rw-r--r-- spec/frontend/boards/components/board_configuration_options_spec.js | 59
-rw-r--r-- spec/frontend/boards/components/board_content_spec.js | 3
-rw-r--r-- spec/frontend/boards/components/sidebar/board_editable_item_spec.js | 14
-rw-r--r-- spec/frontend/boards/stores/actions_spec.js | 74
-rw-r--r-- spec/frontend/boards/stores/mutations_spec.js | 70
-rw-r--r-- spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js | 102
-rw-r--r-- spec/frontend/ci_settings_pipeline_triggers/mock_data.js | 30
-rw-r--r-- spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js | 24
-rw-r--r-- spec/frontend/clusters/components/fluentd_output_settings_spec.js | 4
-rw-r--r-- spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js | 10
-rw-r--r-- spec/frontend/clusters/components/knative_domain_editor_spec.js | 4
-rw-r--r-- spec/frontend/clusters/services/crossplane_provider_stack_spec.js | 4
-rw-r--r-- spec/frontend/clusters_list/components/clusters_spec.js | 18
-rw-r--r-- spec/frontend/clusters_list/components/node_error_help_text_spec.js | 33
-rw-r--r-- spec/frontend/clusters_list/mock_data.js | 10
-rw-r--r-- spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap | 1
-rw-r--r-- spec/frontend/commit/pipelines/pipelines_spec.js | 82
-rw-r--r-- spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js | 8
-rw-r--r-- spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js | 1
-rw-r--r-- spec/frontend/create_cluster/eks_cluster/store/actions_spec.js | 32
-rw-r--r-- spec/frontend/design_management/components/design_todo_button_spec.js | 2
-rw-r--r-- spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap | 2
-rw-r--r-- spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap | 6
-rw-r--r-- spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap | 9
-rw-r--r-- spec/frontend/design_management/components/toolbar/design_navigation_spec.js | 4
-rw-r--r-- spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap | 3
-rw-r--r-- spec/frontend/design_management/mock_data/apollo_mock.js | 1
-rw-r--r-- spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap | 11
-rw-r--r-- spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap | 1
-rw-r--r-- spec/frontend/design_management/pages/index_spec.js | 56
-rw-r--r-- spec/frontend/design_management/router_spec.js | 2
-rw-r--r-- spec/frontend/design_management/utils/design_management_utils_spec.js | 4
-rw-r--r-- spec/frontend/diff_comments_store_spec.js | 136
-rw-r--r-- spec/frontend/diffs/components/app_spec.js | 4
-rw-r--r-- spec/frontend/diffs/components/collapsed_files_warning_spec.js | 2
-rw-r--r-- spec/frontend/diffs/components/commit_item_spec.js | 6
-rw-r--r-- spec/frontend/diffs/components/diff_file_header_spec.js | 53
-rw-r--r-- spec/frontend/diffs/components/diff_file_spec.js | 26
-rw-r--r-- spec/frontend/diffs/components/diff_row_utils_spec.js | 203
-rw-r--r-- spec/frontend/diffs/components/diff_table_cell_spec.js | 279
-rw-r--r-- spec/frontend/diffs/components/edit_button_spec.js | 75
-rw-r--r-- spec/frontend/diffs/mock_data/diff_file.js | 2
-rw-r--r-- spec/frontend/diffs/mock_data/diff_file_unreadable.js | 2
-rw-r--r-- spec/frontend/diffs/store/actions_spec.js | 8
-rw-r--r-- spec/frontend/diffs/store/getters_spec.js | 90
-rw-r--r-- spec/frontend/diffs/store/mutations_spec.js | 8
-rw-r--r-- spec/frontend/emoji/emoji_spec.js | 41
-rw-r--r-- spec/frontend/environment.js | 10
-rw-r--r-- spec/frontend/environments/environments_app_spec.js | 12
-rw-r--r-- spec/frontend/environments/folder/environments_folder_view_spec.js | 5
-rw-r--r-- spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js | 12
-rw-r--r-- spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js | 159
-rw-r--r-- spec/frontend/feature_flags/components/edit_feature_flag_spec.js | 197
-rw-r--r-- spec/frontend/feature_flags/components/environments_dropdown_spec.js | 145
-rw-r--r-- spec/frontend/feature_flags/components/feature_flags_spec.js | 343
-rw-r--r-- spec/frontend/feature_flags/components/feature_flags_tab_spec.js | 168
-rw-r--r-- spec/frontend/feature_flags/components/feature_flags_table_spec.js | 262
-rw-r--r-- spec/frontend/feature_flags/components/form_spec.js | 485
-rw-r--r-- spec/frontend/feature_flags/components/new_environments_dropdown_spec.js | 103
-rw-r--r-- spec/frontend/feature_flags/components/new_feature_flag_spec.js | 145
-rw-r--r-- spec/frontend/feature_flags/components/strategy_spec.js | 320
-rw-r--r-- spec/frontend/feature_flags/components/user_lists_table_spec.js | 98
-rw-r--r-- spec/frontend/feature_flags/mock_data.js | 109
-rw-r--r-- spec/frontend/feature_flags/store/edit/actions_spec.js | 334
-rw-r--r-- spec/frontend/feature_flags/store/edit/mutations_spec.js | 150
-rw-r--r-- spec/frontend/feature_flags/store/helpers_spec.js | 514
-rw-r--r-- spec/frontend/feature_flags/store/index/actions_spec.js | 605
-rw-r--r-- spec/frontend/feature_flags/store/index/mutations_spec.js | 332
-rw-r--r-- spec/frontend/feature_flags/store/new/actions_spec.js | 223
-rw-r--r-- spec/frontend/feature_flags/store/new/mutations_spec.js | 65
-rw-r--r-- spec/frontend/fixtures/releases.rb | 136
-rw-r--r-- spec/frontend/gfm_auto_complete_spec.js | 219
-rw-r--r-- spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap | 22
-rw-r--r-- spec/frontend/groups/components/item_actions_spec.js | 125
-rw-r--r-- spec/frontend/groups/components/item_caret_spec.js | 58
-rw-r--r-- spec/frontend/groups/components/item_stats_spec.js | 131
-rw-r--r-- spec/frontend/groups/components/item_stats_value_spec.js | 111
-rw-r--r-- spec/frontend/groups/components/item_type_icon_spec.js | 80
-rw-r--r-- spec/frontend/groups/members/index_spec.js | 10
-rw-r--r-- spec/frontend/helpers/experimentation_helper.js | 14
-rw-r--r-- spec/frontend/helpers/keep_alive_component_helper.js | 29
-rw-r--r-- spec/frontend/helpers/keep_alive_component_helper_spec.js | 32
-rw-r--r-- spec/frontend/helpers/local_storage_helper.js | 2
-rw-r--r-- spec/frontend/helpers/local_storage_helper_spec.js | 4
-rw-r--r-- spec/frontend/helpers/vue_test_utils_helper.js | 7
-rw-r--r-- spec/frontend/helpers/wait_for_text.js | 3
-rw-r--r-- spec/frontend/ide/components/commit_sidebar/actions_spec.js | 4
-rw-r--r-- spec/frontend/ide/components/ide_review_spec.js | 78
-rw-r--r-- spec/frontend/ide/components/ide_side_bar_spec.js | 81
-rw-r--r-- spec/frontend/ide/components/ide_tree_list_spec.js | 8
-rw-r--r-- spec/frontend/ide/components/ide_tree_spec.js | 41
-rw-r--r-- spec/frontend/ide/components/repo_commit_section_spec.js | 21
-rw-r--r-- spec/frontend/incidents/components/incidents_list_spec.js | 211
-rw-r--r-- spec/frontend/incidents/mocks/incidents_filter.json | 14
-rw-r--r-- spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap | 27
-rw-r--r-- spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap | 1
-rw-r--r-- spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap | 54
-rw-r--r-- spec/frontend/integrations/edit/components/confirmation_modal_spec.js | 51
-rw-r--r-- spec/frontend/integrations/edit/components/integration_form_spec.js | 23
-rw-r--r-- spec/frontend/integrations/edit/mock_data.js | 1
-rw-r--r-- spec/frontend/invite_members/components/invite_members_modal_spec.js | 115
-rw-r--r-- spec/frontend/invite_members/components/invite_members_trigger_spec.js | 58
-rw-r--r-- spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js | 5
-rw-r--r-- spec/frontend/issuable/related_issues/components/issue_token_spec.js | 257
-rw-r--r-- spec/frontend/issuable/related_issues/components/related_issues_block_spec.js | 5
-rw-r--r-- spec/frontend/issuable/related_issues/components/related_issues_list_spec.js | 5
-rw-r--r-- spec/frontend/issuable_create/components/issuable_form_spec.js | 1
-rw-r--r-- spec/frontend/issue_show/components/incidents/highlight_bar_spec.js | 4
-rw-r--r-- spec/frontend/issue_show/components/incidents/incident_tabs_spec.js | 2
-rw-r--r-- spec/frontend/issue_show/issue_spec.js | 8
-rw-r--r-- spec/frontend/jobs/store/utils_spec.js | 8
-rw-r--r-- spec/frontend/lib/dompurify_spec.js | 98
-rw-r--r-- spec/frontend/lib/utils/axios_startup_calls_spec.js | 49
-rw-r--r-- spec/frontend/lib/utils/datetime_utility_spec.js | 23
-rw-r--r-- spec/frontend/lib/utils/experimentation_spec.js | 20
-rw-r--r-- spec/frontend/lib/utils/url_utility_spec.js | 57
-rw-r--r-- spec/frontend/merge_request_spec.js | 63
-rw-r--r-- spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap | 181
-rw-r--r-- spec/frontend/monitoring/components/group_empty_state_spec.js | 33
-rw-r--r-- spec/frontend/notes/components/discussion_counter_spec.js | 28
-rw-r--r-- spec/frontend/notes/components/discussion_filter_spec.js | 20
-rw-r--r-- spec/frontend/notes/components/sort_discussion_spec.js | 12
-rw-r--r-- spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap | 214
-rw-r--r-- spec/frontend/packages/details/store/getters_spec.js | 2
-rw-r--r-- spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap | 756
-rw-r--r-- spec/frontend/packages/list/components/packages_list_app_spec.js | 1
-rw-r--r-- spec/frontend/packages/list/components/packages_title_spec.js | 71
-rw-r--r-- spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap | 26
-rw-r--r-- spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap | 3
-rw-r--r-- spec/frontend/packages/shared/components/package_list_row_spec.js | 8
-rw-r--r-- spec/frontend/packages/shared/components/package_path_spec.js | 86
-rw-r--r-- spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap | 3
-rw-r--r-- spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js | 21
-rw-r--r-- spec/frontend/pipeline_new/mock_data.js | 6
-rw-r--r-- spec/frontend/pipelines/components/dag/dag_spec.js | 9
-rw-r--r-- spec/frontend/pipelines/graph/graph_component_spec.js | 102
-rw-r--r-- spec/frontend/pipelines/header_component_spec.js | 177
-rw-r--r-- spec/frontend/pipelines/legacy_header_component_spec.js | 116
-rw-r--r-- spec/frontend/pipelines/mock_data.js | 78
-rw-r--r-- spec/frontend/pipelines/pipelines_spec.js | 383
-rw-r--r-- spec/frontend/pipelines/test_reports/mock_data.js | 16
-rw-r--r-- spec/frontend/pipelines/test_reports/test_suite_table_spec.js | 21
-rw-r--r-- spec/frontend/project_find_file_spec.js | 5
-rw-r--r-- spec/frontend/projects/commit_box/info/load_branches_spec.js | 68
-rw-r--r-- spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap | 1
-rw-r--r-- spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap | 3
-rw-r--r-- spec/frontend/projects/settings/access_dropdown_spec.js | 21
-rw-r--r-- spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js | 4
-rw-r--r-- spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js | 71
-rw-r--r-- spec/frontend/registry/explorer/components/list_page/registry_header_spec.js | 90
-rw-r--r-- spec/frontend/registry/explorer/pages/details_spec.js | 65
-rw-r--r-- spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap | 7
-rw-r--r-- spec/frontend/registry/settings/components/registry_settings_app_spec.js | 88
-rw-r--r-- spec/frontend/registry/settings/components/settings_form_spec.js | 293
-rw-r--r-- spec/frontend/registry/settings/graphql/cache_updated_spec.js | 56
-rw-r--r-- spec/frontend/registry/settings/mock_data.js | 32
-rw-r--r-- spec/frontend/registry/settings/store/actions_spec.js | 90
-rw-r--r-- spec/frontend/registry/settings/store/getters_spec.js | 72
-rw-r--r-- spec/frontend/registry/settings/store/mutations_spec.js | 80
-rw-r--r-- spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap | 101
-rw-r--r-- spec/frontend/registry/shared/components/expiration_policy_fields_spec.js | 20
-rw-r--r-- spec/frontend/registry/shared/stubs.js | 11
-rw-r--r-- spec/frontend/registry/shared/utils_spec.js | 37
-rw-r--r-- spec/frontend/related_merge_requests/components/related_merge_requests_spec.js | 5
-rw-r--r-- spec/frontend/releases/__snapshots__/util_spec.js.snap | 110
-rw-r--r-- spec/frontend/releases/components/app_edit_new_spec.js | 5
-rw-r--r-- spec/frontend/releases/components/app_index_spec.js | 63
-rw-r--r-- spec/frontend/releases/components/app_show_spec.js | 8
-rw-r--r-- spec/frontend/releases/components/asset_links_form_spec.js | 18
-rw-r--r-- spec/frontend/releases/components/evidence_block_spec.js | 8
-rw-r--r-- spec/frontend/releases/components/release_block_assets_spec.js | 10
-rw-r--r-- spec/frontend/releases/components/release_block_footer_spec.js | 4
-rw-r--r-- spec/frontend/releases/components/release_block_header_spec.js | 6
-rw-r--r-- spec/frontend/releases/components/release_block_metadata_spec.js | 4
-rw-r--r-- spec/frontend/releases/components/release_block_milestone_info_spec.js | 26
-rw-r--r-- spec/frontend/releases/components/release_block_spec.js | 8
-rw-r--r-- spec/frontend/releases/components/release_skeleton_loader_spec.js | 15
-rw-r--r-- spec/frontend/releases/components/releases_pagination_graphql_spec.js | 10
-rw-r--r-- spec/frontend/releases/components/releases_pagination_rest_spec.js | 8
-rw-r--r-- spec/frontend/releases/mock_data.js | 335
-rw-r--r-- spec/frontend/releases/stores/getters_spec.js | 22
-rw-r--r-- spec/frontend/releases/stores/modules/detail/actions_spec.js | 13
-rw-r--r-- spec/frontend/releases/stores/modules/detail/mutations_spec.js | 25
-rw-r--r-- spec/frontend/releases/stores/modules/list/actions_spec.js | 334
-rw-r--r-- spec/frontend/releases/stores/modules/list/mutations_spec.js | 44
-rw-r--r-- spec/frontend/releases/util_spec.js | 6
-rw-r--r-- spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap | 58
-rw-r--r-- spec/frontend/repository/components/last_commit_spec.js | 2
-rw-r--r-- spec/frontend/repository/log_tree_spec.js | 8
-rw-r--r-- spec/frontend/right_sidebar_spec.js | 13
-rw-r--r-- spec/frontend/search/components/dropdown_filter_spec.js (renamed from spec/frontend/search/components/state_filter_spec.js) | 34
-rw-r--r-- spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap | 12
-rw-r--r-- spec/frontend/self_monitor/components/self_monitor_form_spec.js | 4
-rw-r--r-- spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap | 2
-rw-r--r-- spec/frontend/serverless/components/missing_prometheus_spec.js | 4
-rw-r--r-- spec/frontend/sidebar/confidential/edit_form_buttons_spec.js | 15
-rw-r--r-- spec/frontend/sidebar/lock/edit_form_buttons_spec.js | 14
-rw-r--r-- spec/frontend/sidebar/reviewer_title_spec.js | 116
-rw-r--r-- spec/frontend/sidebar/reviewers_spec.js | 169
-rw-r--r-- spec/frontend/sidebar/sidebar_labels_spec.js | 2
-rw-r--r-- spec/frontend/snippet/snippet_edit_spec.js | 1
-rw-r--r-- spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap | 1
-rw-r--r-- spec/frontend/snippets/components/edit_spec.js | 8
-rw-r--r-- spec/frontend/snippets/components/snippet_blob_view_spec.js | 6
-rw-r--r-- spec/frontend/static_site_editor/mock_data.js | 5
-rw-r--r-- spec/frontend/static_site_editor/pages/home_spec.js | 51
-rw-r--r-- spec/frontend/static_site_editor/pages/success_spec.js | 110
-rw-r--r-- spec/frontend/static_site_editor/services/front_matterify_spec.js | 47
-rw-r--r-- spec/frontend/static_site_editor/services/submit_content_changes_spec.js | 53
-rw-r--r-- spec/frontend/static_site_editor/services/templater_spec.js | 8
-rw-r--r-- spec/frontend/test_setup.js | 3
-rw-r--r-- spec/frontend/user_lists/components/add_user_modal_spec.js | 50
-rw-r--r-- spec/frontend/user_lists/components/edit_user_list_spec.js | 150
-rw-r--r-- spec/frontend/user_lists/components/new_user_list_spec.js | 93
-rw-r--r-- spec/frontend/user_lists/components/user_list_form_spec.js | 40
-rw-r--r-- spec/frontend/user_lists/components/user_list_spec.js | 196
-rw-r--r-- spec/frontend/user_lists/store/edit/actions_spec.js | 121
-rw-r--r-- spec/frontend/user_lists/store/edit/mutations_spec.js | 61
-rw-r--r-- spec/frontend/user_lists/store/new/actions_spec.js | 69
-rw-r--r-- spec/frontend/user_lists/store/new/mutations_spec.js | 38
-rw-r--r-- spec/frontend/user_lists/store/show/actions_spec.js | 117
-rw-r--r-- spec/frontend/user_lists/store/show/mutations_spec.js | 86
-rw-r--r-- spec/frontend/user_lists/store/utils_spec.js | 23
-rw-r--r-- spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js | 2
-rw-r--r-- spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js | 22
-rw-r--r-- spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js | 14
-rw-r--r-- spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js | 15
-rw-r--r-- spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js | 5
-rw-r--r-- spec/frontend/vue_mr_widget/deployment/deployment_spec.js | 5
-rw-r--r-- spec/frontend/vue_mr_widget/mr_widget_options_spec.js | 59
-rw-r--r-- spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap | 2
-rw-r--r-- spec/frontend/vue_shared/components/alert_details_table_spec.js (renamed from spec/frontend/vue_shared/components/alert_detail_table_spec.js) | 29
-rw-r--r-- spec/frontend/vue_shared/components/clipboard_button_spec.js | 5
-rw-r--r-- spec/frontend/vue_shared/components/confirm_modal_spec.js | 16
-rw-r--r-- spec/frontend/vue_shared/components/local_storage_sync_spec.js | 122
-rw-r--r-- spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap | 1
-rw-r--r-- spec/frontend/vue_shared/components/markdown/field_spec.js | 17
-rw-r--r-- spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js | 8
-rw-r--r-- spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js | 46
-rw-r--r-- spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js | 38
-rw-r--r-- spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js | 85
-rw-r--r-- spec/frontend/vue_shared/components/members/mock_data.js | 61
-rw-r--r-- spec/frontend/vue_shared/components/members/table/created_at_spec.js | 61
-rw-r--r-- spec/frontend/vue_shared/components/members/table/expires_at_spec.js | 86
-rw-r--r-- spec/frontend/vue_shared/components/members/table/member_avatar_spec.js | 39
-rw-r--r-- spec/frontend/vue_shared/components/members/table/member_source_spec.js | 71
-rw-r--r-- spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js | 130
-rw-r--r-- spec/frontend/vue_shared/components/members/table/members_table_spec.js | 104
-rw-r--r-- spec/frontend/vue_shared/components/members/utils_spec.js | 29
-rw-r--r-- spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap | 5
-rw-r--r-- spec/frontend/vue_shared/components/registry/title_area_spec.js | 33
-rw-r--r-- spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js | 11
-rw-r--r-- spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js | 27
-rw-r--r-- spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js | 11
-rw-r--r-- spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js | 23
-rw-r--r-- spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js | 7
-rw-r--r-- spec/frontend/vue_shared/components/todo_button_spec.js | 2
-rw-r--r-- spec/frontend/vue_shared/components/web_ide_link_spec.js | 1
-rw-r--r-- spec/frontend/vue_shared/droplab_dropdown_button_spec.js | 132
-rw-r--r-- spec/frontend/whats_new/components/app_spec.js | 38
-rw-r--r-- spec/frontend/whats_new/store/actions_spec.js | 7
-rw-r--r-- spec/frontend_integration/.eslintrc.yml | 2
-rw-r--r-- spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap | 1
-rw-r--r-- spec/frontend_integration/ide/ide_helper.js | 102
-rw-r--r-- spec/frontend_integration/ide/ide_integration_spec.js | 47
-rw-r--r-- spec/frontend_integration/test_helpers/setup/setup_mock_server.js | 9
-rw-r--r-- spec/graphql/features/feature_flag_spec.rb | 4
-rw-r--r-- spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb | 35
-rw-r--r-- spec/graphql/resolvers/board_resolver_spec.rb | 72
-rw-r--r-- spec/graphql/resolvers/concerns/looks_ahead_spec.rb | 14
-rw-r--r-- spec/graphql/resolvers/issues_resolver_spec.rb | 33
-rw-r--r-- spec/graphql/resolvers/projects_resolver_spec.rb | 44
-rw-r--r-- spec/graphql/resolvers/snippets/blobs_resolver_spec.rb | 50
-rw-r--r-- spec/graphql/resolvers/terraform/states_resolver_spec.rb | 33
-rw-r--r-- spec/graphql/types/base_field_spec.rb | 4
-rw-r--r-- spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb | 11
-rw-r--r-- spec/graphql/types/design_management/design_collection_type_spec.rb | 2
-rw-r--r-- spec/graphql/types/global_id_type_spec.rb | 26
-rw-r--r-- spec/graphql/types/issue_sort_enum_spec.rb | 2
-rw-r--r-- spec/graphql/types/package_type_enum_spec.rb | 2
-rw-r--r-- spec/graphql/types/project_type_spec.rb | 9
-rw-r--r-- spec/graphql/types/snippet_type_spec.rb | 52
-rw-r--r-- spec/graphql/types/terraform/state_type_spec.rb | 21
-rw-r--r-- spec/haml_lint/linter/documentation_links_spec.rb | 98
-rw-r--r-- spec/helpers/analytics/unique_visits_helper_spec.rb | 9
-rw-r--r-- spec/helpers/boards_helper_spec.rb | 5
-rw-r--r-- spec/helpers/clusters_helper_spec.rb | 44
-rw-r--r-- spec/helpers/emails_helper_spec.rb | 112
-rw-r--r-- spec/helpers/feature_flags_helper_spec.rb | 21
-rw-r--r-- spec/helpers/gitlab_routing_helper_spec.rb | 4
-rw-r--r-- spec/helpers/groups/group_members_helper_spec.rb | 15
-rw-r--r-- spec/helpers/issuables_helper_spec.rb | 32
-rw-r--r-- spec/helpers/issues_helper_spec.rb | 21
-rw-r--r-- spec/helpers/projects/incidents_helper_spec.rb | 14
-rw-r--r-- spec/helpers/projects_helper_spec.rb | 114
-rw-r--r-- spec/helpers/search_helper_spec.rb | 29
-rw-r--r-- spec/helpers/tree_helper_spec.rb | 10
-rw-r--r-- spec/helpers/whats_new_helper_spec.rb | 46
-rw-r--r-- spec/lib/api/github/entities_spec.rb | 31
-rw-r--r-- spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb | 2
-rw-r--r-- spec/lib/api/helpers_spec.rb | 19
-rw-r--r-- spec/lib/backup/files_spec.rb | 2
-rw-r--r-- spec/lib/backup/repositories_spec.rb (renamed from spec/lib/backup/repository_spec.rb) | 128
-rw-r--r-- spec/lib/banzai/filter/design_reference_filter_spec.rb | 20
-rw-r--r-- spec/lib/banzai/filter/external_issue_reference_filter_spec.rb | 10
-rw-r--r-- spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb | 2
-rw-r--r-- spec/lib/banzai/filter/issue_reference_filter_spec.rb | 6
-rw-r--r-- spec/lib/banzai/filter/milestone_reference_filter_spec.rb | 44
-rw-r--r-- spec/lib/feature/definition_spec.rb | 11
-rw-r--r-- spec/lib/feature_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/alert_management/alert_params_spec.rb | 101
-rw-r--r-- spec/lib/gitlab/alert_management/payload/generic_spec.rb | 32
-rw-r--r-- spec/lib/gitlab/alerting/alert_spec.rb | 299
-rw-r--r-- spec/lib/gitlab/alerting/notification_payload_parser_spec.rb | 204
-rw-r--r-- spec/lib/gitlab/analytics/unique_visits_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/auth/auth_finders_spec.rb | 26
-rw-r--r-- spec/lib/gitlab/auth/current_user_mode_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/bitbucket_import/importer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/checks/matching_merge_request_spec.rb | 31
-rw-r--r-- spec/lib/gitlab/ci/ansi2json/line_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/ci/ansi2json_spec.rb | 38
-rw-r--r-- spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb | 147
-rw-r--r-- spec/lib/gitlab/ci/config/entry/product/variables_spec.rb | 77
-rw-r--r-- spec/lib/gitlab/ci/cron_parser_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/ci/parsers/test/junit_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 10
-rw-r--r-- spec/lib/gitlab/ci/runner/backoff_spec.rb | 126
-rw-r--r-- spec/lib/gitlab/ci/trace/checksum_spec.rb | 121
-rw-r--r-- spec/lib/gitlab/ci/trace/metrics_spec.rb | 18
-rw-r--r-- spec/lib/gitlab/ci/trace_spec.rb | 9
-rw-r--r-- spec/lib/gitlab/ci/yaml_processor/result_spec.rb | 45
-rw-r--r-- spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/code_navigation_path_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/cycle_analytics/events_spec.rb | 273
-rw-r--r-- spec/lib/gitlab/danger/commit_linter_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/danger/helper_spec.rb | 22
-rw-r--r-- spec/lib/gitlab/danger/roulette_spec.rb | 73
-rw-r--r-- spec/lib/gitlab/danger/teammate_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/background_migration_job_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/batch_count_spec.rb | 36
-rw-r--r-- spec/lib/gitlab/database/concurrent_reindex_spec.rb | 207
-rw-r--r-- spec/lib/gitlab/database/migration_helpers_spec.rb | 102
-rw-r--r-- spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/database/postgres_index_spec.rb | 116
-rw-r--r-- spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb | 255
-rw-r--r-- spec/lib/gitlab/database/reindexing/reindex_action_spec.rb | 86
-rw-r--r-- spec/lib/gitlab/database/reindexing_spec.rb | 66
-rw-r--r-- spec/lib/gitlab/database/similarity_score_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/database/with_lock_retries_spec.rb | 64
-rw-r--r-- spec/lib/gitlab/database_spec.rb | 81
-rw-r--r-- spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/diff/highlight_cache_spec.rb | 34
-rw-r--r-- spec/lib/gitlab/email/handler/create_note_handler_spec.rb | 23
-rw-r--r-- spec/lib/gitlab/exclusive_lease_helpers_spec.rb | 15
-rw-r--r-- spec/lib/gitlab/experimentation_spec.rb | 58
-rw-r--r-- spec/lib/gitlab/git/branch_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/git/diff_collection_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/git/diff_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/git/object_pool_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/git/remote_mirror_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/git/repository_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/git_access_snippet_spec.rb | 23
-rw-r--r-- spec/lib/gitlab/gitaly_client/commit_service_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/gon_helper_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb | 33
-rw-r--r-- spec/lib/gitlab/graphql/markdown_field_spec.rb | 59
-rw-r--r-- spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb | 11
-rw-r--r-- spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb | 7
-rw-r--r-- spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/group_search_results_spec.rb | 7
-rw-r--r-- spec/lib/gitlab/import_export/all_models.yml | 4
-rw-r--r-- spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb | 10
-rw-r--r-- spec/lib/gitlab/import_export/group/relation_factory_spec.rb | 95
-rw-r--r-- spec/lib/gitlab/import_export/lfs_saver_spec.rb | 8
-rw-r--r-- spec/lib/gitlab/import_export/project/relation_factory_spec.rb | 73
-rw-r--r-- spec/lib/gitlab/job_waiter_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/lfs/client_spec.rb | 103
-rw-r--r-- spec/lib/gitlab/lfs_token_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/manifest_import/manifest_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/manifest_import/metadata_spec.rb | 62
-rw-r--r-- spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb | 82
-rw-r--r-- spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb | 88
-rw-r--r-- spec/lib/gitlab/middleware/rails_queue_duration_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/pagination/offset_pagination_spec.rb | 30
-rw-r--r-- spec/lib/gitlab/project_search_results_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/project_template_spec.rb | 6
-rw-r--r-- spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/prometheus/query_variables_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/regex_spec.rb | 205
-rw-r--r-- spec/lib/gitlab/search_results_spec.rb | 39
-rw-r--r-- spec/lib/gitlab/sql/pattern_spec.rb | 37
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb | 245
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb | 38
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb | 101
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb | 53
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb | 50
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/file_config_spec.rb | 82
-rw-r--r-- spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb | 22
-rw-r--r-- spec/lib/gitlab/themes_spec.rb | 14
-rw-r--r-- spec/lib/gitlab/tracking_spec.rb | 2
-rw-r--r-- spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb | 12
-rw-r--r-- spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb | 26
-rw-r--r-- spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb | 50
-rw-r--r-- spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb | 4
-rw-r--r-- spec/lib/gitlab/usage_data_spec.rb | 84
-rw-r--r-- spec/lib/gitlab/utils/usage_data_spec.rb | 17
-rw-r--r-- spec/lib/gitlab/webpack/manifest_spec.rb | 116
-rw-r--r-- spec/lib/gitlab_danger_spec.rb | 2
-rw-r--r-- spec/lib/google_api/auth_spec.rb | 8
-rw-r--r-- spec/lib/grafana/time_window_spec.rb | 6
-rw-r--r-- spec/lib/pager_duty/webhook_payload_parser_spec.rb | 84
-rw-r--r-- spec/lib/safe_zip/extract_spec.rb | 36
-rw-r--r-- spec/mailers/abuse_report_mailer_spec.rb | 10
-rw-r--r-- spec/mailers/emails/projects_spec.rb | 119
-rw-r--r-- spec/mailers/notify_spec.rb | 36
-rw-r--r-- spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb | 22
-rw-r--r-- spec/migrations/backfill_status_page_published_incidents_spec.rb | 2
-rw-r--r-- spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb | 101
-rw-r--r-- spec/migrations/ensure_filled_file_store_on_package_files_spec.rb | 40
-rw-r--r-- spec/models/alert_management/http_integration_spec.rb | 92
-rw-r--r-- spec/models/analytics/instance_statistics/measurement_spec.rb | 30
-rw-r--r-- spec/models/application_setting/term_spec.rb | 5
-rw-r--r-- spec/models/application_setting_spec.rb | 2
-rw-r--r-- spec/models/audit_event_spec.rb | 7
-rw-r--r-- spec/models/authentication_event_spec.rb | 36
-rw-r--r-- spec/models/ci/build_pending_state_spec.rb | 27
-rw-r--r-- spec/models/ci/build_spec.rb | 20
-rw-r--r-- spec/models/ci/build_trace_chunk_spec.rb | 58
-rw-r--r-- spec/models/ci/freeze_period_status_spec.rb | 4
-rw-r--r-- spec/models/ci/job_artifact_spec.rb | 4
-rw-r--r-- spec/models/ci/pipeline_schedule_spec.rb | 4
-rw-r--r-- spec/models/ci/pipeline_spec.rb | 12
-rw-r--r-- spec/models/ci_platform_metric_spec.rb | 6
-rw-r--r-- spec/models/clusters/applications/runner_spec.rb | 4
-rw-r--r-- spec/models/clusters/cluster_spec.rb | 1
-rw-r--r-- spec/models/commit_status_spec.rb | 133
-rw-r--r-- spec/models/concerns/avatarable_spec.rb | 4
-rw-r--r-- spec/models/concerns/bulk_insertable_associations_spec.rb | 6
-rw-r--r-- spec/models/concerns/cache_markdown_field_spec.rb | 6
-rw-r--r-- spec/models/concerns/case_sensitivity_spec.rb | 4
-rw-r--r-- spec/models/concerns/checksummable_spec.rb | 16
-rw-r--r-- spec/models/concerns/counter_attribute_spec.rb | 30
-rw-r--r-- spec/models/concerns/featurable_spec.rb | 2
-rw-r--r-- spec/models/concerns/issuable_spec.rb | 76
-rw-r--r-- spec/models/concerns/mentionable_spec.rb | 6
-rw-r--r-- spec/models/concerns/milestoneable_spec.rb | 2
-rw-r--r-- spec/models/concerns/milestoneish_spec.rb | 8
-rw-r--r-- spec/models/concerns/reactive_caching_spec.rb | 11
-rw-r--r-- spec/models/concerns/resolvable_discussion_spec.rb | 6
-rw-r--r-- spec/models/concerns/routable_spec.rb | 2
-rw-r--r-- spec/models/concerns/schedulable_spec.rb | 2
-rw-r--r-- spec/models/concerns/subscribable_spec.rb | 22
-rw-r--r-- spec/models/concerns/token_authenticatable_spec.rb | 2
-rw-r--r-- spec/models/container_repository_spec.rb | 27
-rw-r--r-- spec/models/design_management/design_collection_spec.rb | 12
-rw-r--r-- spec/models/design_management/design_spec.rb | 9
-rw-r--r-- spec/models/event_spec.rb | 50
-rw-r--r-- spec/models/group_import_state_spec.rb | 1
-rw-r--r-- spec/models/group_spec.rb | 319
-rw-r--r-- spec/models/import_failure_spec.rb | 2
-rw-r--r-- spec/models/integration_spec.rb | 10
-rw-r--r-- spec/models/issue/metrics_spec.rb | 14
-rw-r--r-- spec/models/issue_email_participant_spec.rb | 19
-rw-r--r-- spec/models/issue_spec.rb | 158
-rw-r--r-- spec/models/iteration_spec.rb | 45
-rw-r--r-- spec/models/member_spec.rb | 76
-rw-r--r-- spec/models/merge_request_diff_spec.rb | 11
-rw-r--r-- spec/models/merge_request_spec.rb | 97
-rw-r--r-- spec/models/namespace_spec.rb | 136
-rw-r--r-- spec/models/notification_setting_spec.rb | 1
-rw-r--r-- spec/models/operations/feature_flag_spec.rb | 18
-rw-r--r-- spec/models/operations/feature_flags/strategy_spec.rb | 176
-rw-r--r-- spec/models/packages/package_spec.rb | 14
-rw-r--r-- spec/models/plan_limits_spec.rb | 1
-rw-r--r-- spec/models/project_feature_usage_spec.rb | 2
-rw-r--r-- spec/models/project_spec.rb | 35
-rw-r--r-- spec/models/project_statistics_spec.rb | 52
-rw-r--r-- spec/models/project_tracing_setting_spec.rb | 40
-rw-r--r-- spec/models/resource_label_event_spec.rb | 36
-rw-r--r-- spec/models/resource_milestone_event_spec.rb | 1
-rw-r--r-- spec/models/resource_state_event_spec.rb | 16
-rw-r--r-- spec/models/resource_weight_event_spec.rb | 10
-rw-r--r-- spec/models/service_spec.rb | 36
-rw-r--r-- spec/models/snippet_input_action_spec.rb | 2
-rw-r--r-- spec/models/snippet_repository_spec.rb | 34
-rw-r--r-- spec/models/snippet_statistics_spec.rb | 19
-rw-r--r-- spec/models/terraform/state_spec.rb | 21
-rw-r--r-- spec/models/terraform/state_version_spec.rb | 2
-rw-r--r-- spec/models/todo_spec.rb | 46
-rw-r--r-- spec/models/user_spec.rb | 28
-rw-r--r-- spec/models/wiki_directory_spec.rb | 78
-rw-r--r-- spec/models/wiki_page_spec.rb | 293
-rw-r--r-- spec/policies/design_management/design_policy_spec.rb | 36
-rw-r--r-- spec/policies/global_policy_spec.rb | 18
-rw-r--r-- spec/policies/group_policy_spec.rb | 70
-rw-r--r-- spec/policies/project_policy_spec.rb | 2
-rw-r--r-- spec/policies/terraform/state_policy_spec.rb | 33
-rw-r--r-- spec/presenters/ci/pipeline_presenter_spec.rb | 17
-rw-r--r-- spec/presenters/event_presenter_spec.rb | 30
-rw-r--r-- spec/presenters/merge_request_presenter_spec.rb | 21
-rw-r--r-- spec/presenters/projects/prometheus/alert_presenter_spec.rb | 346
-rw-r--r-- spec/presenters/sentry_error_presenter_spec.rb | 8
-rw-r--r-- spec/presenters/snippet_blob_presenter_spec.rb | 92
-rw-r--r-- spec/presenters/snippet_presenter_spec.rb | 21
-rw-r--r-- spec/requests/api/admin/instance_clusters_spec.rb | 18
-rw-r--r-- spec/requests/api/ci/runner/jobs_artifacts_spec.rb | 30
-rw-r--r-- spec/requests/api/ci/runner/jobs_put_spec.rb | 43
-rw-r--r-- spec/requests/api/commits_spec.rb | 59
-rw-r--r-- spec/requests/api/debian_group_packages_spec.rb | 39
-rw-r--r-- spec/requests/api/debian_project_packages_spec.rb | 46
-rw-r--r-- spec/requests/api/features_spec.rb | 2
-rw-r--r-- spec/requests/api/files_spec.rb | 6
-rw-r--r-- spec/requests/api/generic_packages_spec.rb | 419
-rw-r--r-- spec/requests/api/graphql/gitlab_schema_spec.rb | 4
-rw-r--r-- spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb | 77
-rw-r--r-- spec/requests/api/graphql/mutations/snippets/create_spec.rb | 12
-rw-r--r-- spec/requests/api/graphql/mutations/snippets/update_spec.rb | 6
-rw-r--r-- spec/requests/api/graphql/project/issues_spec.rb | 35
-rw-r--r-- spec/requests/api/graphql_spec.rb | 10
-rw-r--r-- spec/requests/api/group_clusters_spec.rb | 18
-rw-r--r-- spec/requests/api/group_container_repositories_spec.rb | 2
-rw-r--r-- spec/requests/api/group_packages_spec.rb | 6
-rw-r--r-- spec/requests/api/groups_spec.rb | 133
-rw-r--r-- spec/requests/api/helpers_spec.rb | 63
-rw-r--r-- spec/requests/api/internal/base_spec.rb | 84
-rw-r--r-- spec/requests/api/internal/lfs_spec.rb | 93
-rw-r--r-- spec/requests/api/lint_spec.rb | 49
-rw-r--r-- spec/requests/api/maven_packages_spec.rb | 4
-rw-r--r-- spec/requests/api/merge_requests_spec.rb | 2
-rw-r--r-- spec/requests/api/npm_packages_spec.rb | 10
-rw-r--r-- spec/requests/api/project_clusters_spec.rb | 18
-rw-r--r-- spec/requests/api/project_container_repositories_spec.rb | 8
-rw-r--r-- spec/requests/api/project_packages_spec.rb | 13
-rw-r--r-- spec/requests/api/releases_spec.rb | 4
-rw-r--r-- spec/requests/api/repositories_spec.rb | 4
-rw-r--r-- spec/requests/api/search_spec.rb | 36
-rw-r--r-- spec/requests/api/services_spec.rb | 30
-rw-r--r-- spec/requests/api/settings_spec.rb | 8
-rw-r--r-- spec/requests/api/terraform/state_spec.rb | 2
-rw-r--r-- spec/requests/api/terraform/state_version_spec.rb | 210
-rw-r--r-- spec/requests/api/unleash_spec.rb | 608
-rw-r--r-- spec/requests/api/usage_data_spec.rb | 4
-rw-r--r-- spec/requests/api/users_spec.rb | 69
-rw-r--r-- spec/requests/git_http_spec.rb | 26
-rw-r--r-- spec/requests/projects/cycle_analytics_events_spec.rb | 2
-rw-r--r-- spec/requests/request_profiler_spec.rb | 2
-rw-r--r-- spec/routing/admin_routing_spec.rb | 6
-rw-r--r-- spec/routing/group_routing_spec.rb | 4
-rw-r--r-- spec/routing/instance_statistics_routing_spec.rb | 11
-rw-r--r-- spec/routing/project_routing_spec.rb | 6
-rw-r--r-- spec/routing/routing_spec.rb | 6
-rw-r--r-- spec/rubocop/cop/code_reuse/active_record_spec.rb | 6
-rw-r--r-- spec/serializers/ci/trigger_entity_spec.rb | 70
-rw-r--r-- spec/serializers/ci/trigger_serializer_spec.rb | 15
-rw-r--r-- spec/serializers/cluster_serializer_spec.rb | 1
-rw-r--r-- spec/serializers/diff_file_base_entity_spec.rb | 55
-rw-r--r-- spec/serializers/diffs_entity_spec.rb | 10
-rw-r--r-- spec/serializers/feature_flag_entity_spec.rb | 22
-rw-r--r-- spec/serializers/feature_flag_serializer_spec.rb | 23
-rw-r--r-- spec/serializers/feature_flag_summary_entity_spec.rb | 21
-rw-r--r-- spec/serializers/feature_flag_summary_serializer_spec.rb | 22
-rw-r--r-- spec/serializers/feature_flags_client_serializer_spec.rb | 17
-rw-r--r-- spec/serializers/group_group_link_entity_spec.rb | 22
-rw-r--r-- spec/serializers/merge_request_poll_cached_widget_entity_spec.rb | 61
-rw-r--r-- spec/serializers/merge_request_poll_widget_entity_spec.rb | 33
-rw-r--r-- spec/serializers/merge_request_widget_entity_spec.rb | 4
-rw-r--r-- spec/serializers/paginated_diff_entity_spec.rb | 10
-rw-r--r-- spec/serializers/pipeline_serializer_spec.rb | 9
-rw-r--r-- spec/serializers/test_case_entity_spec.rb | 2
-rw-r--r-- spec/services/admin/propagate_integration_service_spec.rb | 82
-rw-r--r-- spec/services/admin/propagate_service_template_spec.rb | 125
-rw-r--r-- spec/services/alert_management/process_prometheus_alert_service_spec.rb | 36
-rw-r--r-- spec/services/audit_event_service_spec.rb | 13
-rw-r--r-- spec/services/bulk_create_integration_service_spec.rb | 107
-rw-r--r-- spec/services/bulk_update_integration_service_spec.rb | 57
-rw-r--r-- spec/services/ci/create_pipeline_service_spec.rb | 27
-rw-r--r-- spec/services/ci/expire_pipeline_cache_service_spec.rb | 2
-rw-r--r-- spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb | 4
-rw-r--r-- spec/services/ci/pipelines/create_artifact_service_spec.rb | 10
-rw-r--r-- spec/services/ci/retry_build_service_spec.rb | 4
-rw-r--r-- spec/services/ci/update_build_queue_service_spec.rb | 16
-rw-r--r-- spec/services/ci/update_build_state_service_spec.rb | 97
-rw-r--r-- spec/services/clusters/gcp/finalize_creation_service_spec.rb | 16
-rw-r--r-- spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb | 20
-rw-r--r-- spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb | 8
-rw-r--r-- spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb | 24
-rw-r--r-- spec/services/deployments/after_create_service_spec.rb | 4
-rw-r--r-- spec/services/design_management/copy_design_collection/copy_service_spec.rb | 259
-rw-r--r-- spec/services/design_management/copy_design_collection/queue_service_spec.rb | 51
-rw-r--r-- spec/services/design_management/delete_designs_service_spec.rb | 2
-rw-r--r-- spec/services/design_management/generate_image_versions_service_spec.rb | 51
-rw-r--r-- spec/services/design_management/save_designs_service_spec.rb | 33
-rw-r--r-- spec/services/feature_flags/create_service_spec.rb | 79
-rw-r--r-- spec/services/feature_flags/destroy_service_spec.rb | 61
-rw-r--r-- spec/services/feature_flags/disable_service_spec.rb | 91
-rw-r--r-- spec/services/feature_flags/enable_service_spec.rb | 153
-rw-r--r-- spec/services/feature_flags/update_service_spec.rb | 250
-rw-r--r-- spec/services/git/branch_hooks_service_spec.rb | 22
-rw-r--r-- spec/services/git/wiki_push_service_spec.rb | 18
-rw-r--r-- spec/services/groups/create_service_spec.rb | 87
-rw-r--r-- spec/services/groups/import_export/import_service_spec.rb | 9
-rw-r--r-- spec/services/groups/transfer_service_spec.rb | 38
-rw-r--r-- spec/services/groups/update_service_spec.rb | 25
-rw-r--r-- spec/services/groups/update_shared_runners_service_spec.rb | 194
-rw-r--r-- spec/services/incident_management/incidents/update_severity_service_spec.rb | 86
-rw-r--r-- spec/services/issuable/bulk_update_service_spec.rb | 2
-rw-r--r-- spec/services/issuable/clone/attributes_rewriter_spec.rb | 10
-rw-r--r-- spec/services/issuable/common_system_notes_service_spec.rb | 8
-rw-r--r-- spec/services/issues/close_service_spec.rb | 23
-rw-r--r-- spec/services/issues/move_service_spec.rb | 50
-rw-r--r-- spec/services/issues/update_service_spec.rb | 2
-rw-r--r-- spec/services/keys/last_used_service_spec.rb | 2
-rw-r--r-- spec/services/lfs/push_service_spec.rb | 48
-rw-r--r-- spec/services/members/destroy_service_spec.rb | 18
-rw-r--r-- spec/services/members/invitation_reminder_email_service_spec.rb | 78
-rw-r--r-- spec/services/merge_requests/close_service_spec.rb | 67
-rw-r--r-- spec/services/merge_requests/export_csv_service_spec.rb | 115
-rw-r--r-- spec/services/merge_requests/ff_merge_service_spec.rb | 104
-rw-r--r-- spec/services/merge_requests/merge_service_spec.rb | 22
-rw-r--r-- spec/services/merge_requests/refresh_service_spec.rb | 139
-rw-r--r-- spec/services/merge_requests/reopen_service_spec.rb | 20
-rw-r--r-- spec/services/merge_requests/update_service_spec.rb | 53
-rw-r--r-- spec/services/metrics/dashboard/custom_dashboard_service_spec.rb | 17
-rw-r--r-- spec/services/milestones/destroy_service_spec.rb | 2
-rw-r--r-- spec/services/milestones/promote_service_spec.rb | 2
-rw-r--r-- spec/services/milestones/transfer_service_spec.rb | 2
-rw-r--r-- spec/services/namespace_settings/update_service_spec.rb | 35
-rw-r--r-- spec/services/notes/create_service_spec.rb | 2
-rw-r--r-- spec/services/notes/update_service_spec.rb | 2
-rw-r--r-- spec/services/notification_service_spec.rb | 85
-rw-r--r-- spec/services/packages/create_event_service_spec.rb | 54
-rw-r--r-- spec/services/packages/generic/create_package_file_service_spec.rb | 54
-rw-r--r-- spec/services/packages/generic/find_or_create_package_service_spec.rb | 88
-rw-r--r-- spec/services/projects/alerting/notify_service_spec.rb | 45
-rw-r--r-- spec/services/projects/container_repository/cleanup_tags_service_spec.rb | 6
-rw-r--r-- spec/services/projects/container_repository/delete_tags_service_spec.rb | 106
-rw-r--r-- spec/services/projects/create_service_spec.rb | 163
-rw-r--r-- spec/services/projects/transfer_service_spec.rb | 31
-rw-r--r-- spec/services/projects/update_pages_service_spec.rb | 6
-rw-r--r-- spec/services/projects/update_remote_mirror_service_spec.rb | 17
-rw-r--r-- spec/services/projects/update_service_spec.rb | 26
-rw-r--r-- spec/services/quick_actions/interpret_service_spec.rb | 72
-rw-r--r-- spec/services/resource_access_tokens/create_service_spec.rb | 12
-rw-r--r-- spec/services/search_service_spec.rb | 6
-rw-r--r-- spec/services/snippets/update_service_spec.rb | 95
-rw-r--r-- spec/services/static_site_editor/config_service_spec.rb | 86
-rw-r--r-- spec/services/system_note_service_spec.rb | 24
-rw-r--r-- spec/services/system_notes/incident_service_spec.rb | 59
-rw-r--r-- spec/services/system_notes/issuables_service_spec.rb | 142
-rw-r--r-- spec/services/users/build_service_spec.rb | 14
-rw-r--r-- spec/simplecov_env.rb | 3
-rw-r--r-- spec/spec_helper.rb | 14
-rw-r--r-- spec/support/capybara.rb | 4
-rw-r--r-- spec/support/counter_attribute.rb | 6
-rw-r--r-- spec/support/factory_bot.rb | 4
-rw-r--r-- spec/support/google_api/cloud_platform_helpers.rb | 2
-rw-r--r-- spec/support/helpers/api_internal_base_helpers.rb | 85
-rw-r--r-- spec/support/helpers/features/canonical_link_helpers.rb | 28
-rw-r--r-- spec/support/helpers/git_http_helpers.rb | 26
-rw-r--r-- spec/support/helpers/graphql_helpers.rb | 3
-rw-r--r-- spec/support/helpers/kubernetes_helpers.rb | 10
-rw-r--r-- spec/support/helpers/search_helpers.rb | 10
-rw-r--r-- spec/support/helpers/stub_experiments.rb | 4
-rw-r--r-- spec/support/helpers/stub_feature_flags.rb | 4
-rw-r--r-- spec/support/helpers/stub_object_storage.rb | 18
-rw-r--r-- spec/support/helpers/usage_data_helpers.rb | 4
-rw-r--r-- spec/support/helpers/wait_for_requests.rb | 11
-rw-r--r-- spec/support/helpers/wiki_helpers.rb | 10
-rw-r--r-- spec/support/matchers/be_sorted.rb | 21
-rw-r--r-- spec/support/migrations_helpers/schema_version_finder.rb | 34
-rw-r--r-- spec/support/models/merge_request_without_merge_request_diff.rb | 7
-rw-r--r-- spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb | 2
-rw-r--r-- spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb | 4
-rw-r--r-- spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb | 2
-rw-r--r-- spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb | 23
-rw-r--r-- spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb | 2
-rw-r--r-- spec/support/shared_examples/features/editable_merge_request_shared_examples.rb | 26
-rw-r--r-- spec/support/shared_examples/features/navbar_shared_examples.rb | 6
-rw-r--r-- spec/support/shared_examples/features/packages_shared_examples.rb | 4
-rw-r--r-- spec/support/shared_examples/features/page_description_shared_examples.rb | 9
-rw-r--r-- spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb | 95
-rw-r--r-- spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb (renamed from spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb) | 4
-rw-r--r-- spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb | 245
-rw-r--r-- spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb | 24
-rw-r--r-- spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb | 110
-rw-r--r-- spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb | 231
-rw-r--r-- spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb | 20
-rw-r--r-- spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb (renamed from spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb) | 22
-rw-r--r-- spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb | 62
-rw-r--r-- spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb (renamed from spec/features/projects/wiki/user_views_wiki_page_spec.rb) | 36
-rw-r--r-- spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb (renamed from spec/features/projects/wiki/user_views_wiki_pages_spec.rb) | 18
-rw-r--r-- spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb | 68
-rw-r--r-- spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb | 24
-rw-r--r-- spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb | 2
-rw-r--r-- spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb | 107
-rw-r--r-- spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb | 91
-rw-r--r-- spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb | 19
-rw-r--r-- spec/support/shared_examples/lib/gitlab/search_state_filter_shared_examples.rb (renamed from spec/support/shared_examples/lib/gitlab/search_issue_state_filter_shared_examples.rb) | 0
-rw-r--r-- spec/support/shared_examples/models/mentionable_shared_examples.rb | 23
-rw-r--r-- spec/support/shared_examples/models/relative_positioning_shared_examples.rb | 163
-rw-r--r-- spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb | 10
-rw-r--r-- spec/support/shared_examples/models/throttled_touch_shared_examples.rb | 4
-rw-r--r-- spec/support/shared_examples/models/update_project_statistics_shared_examples.rb | 260
-rw-r--r-- spec/support/shared_examples/models/wiki_shared_examples.rb | 2
-rw-r--r-- spec/support/shared_examples/policies/resource_access_token_shared_examples.rb | 32
-rw-r--r-- spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb | 2
-rw-r--r-- spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb | 8
-rw-r--r--spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb309
-rw-r--r--spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/api/packages_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/requests/api/tracking_shared_examples.rb9
-rw-r--r--spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/merge_request_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb20
-rw-r--r--spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb10
-rw-r--r--spec/support_specs/helpers/stub_feature_flags_spec.rb31
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb34
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb74
-rw-r--r--spec/uploaders/pages/deployment_uploader_spec.rb39
-rw-r--r--spec/validators/ip_address_validator_spec.rb37
-rw-r--r--spec/views/admin/dashboard/index.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb18
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb8
-rw-r--r--spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb38
-rw-r--r--spec/views/search/_results.html.haml_spec.rb22
-rw-r--r--spec/views/shared/milestones/_issuables.html.haml_spec.rb3
-rw-r--r--spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb20
-rw-r--r--spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb12
-rw-r--r--spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb12
-rw-r--r--spec/workers/cleanup_container_repository_worker_spec.rb23
-rw-r--r--spec/workers/concerns/limited_capacity/job_tracker_spec.rb100
-rw-r--r--spec/workers/concerns/limited_capacity/worker_spec.rb285
-rw-r--r--spec/workers/design_management/copy_design_collection_worker_spec.rb39
-rw-r--r--spec/workers/design_management/new_version_worker_spec.rb4
-rw-r--r--spec/workers/git_garbage_collect_worker_spec.rb50
-rw-r--r--spec/workers/group_import_worker_spec.rb53
-rw-r--r--spec/workers/incident_management/add_severity_system_note_worker_spec.rb60
-rw-r--r--spec/workers/incident_management/process_alert_worker_spec.rb2
-rw-r--r--spec/workers/incident_management/process_prometheus_alert_worker_spec.rb2
-rw-r--r--spec/workers/member_invitation_reminder_emails_worker_spec.rb29
-rw-r--r--spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb25
-rw-r--r--spec/workers/post_receive_spec.rb2
-rw-r--r--spec/workers/propagate_integration_group_worker_spec.rb37
-rw-r--r--spec/workers/propagate_integration_inherit_worker_spec.rb38
-rw-r--r--spec/workers/propagate_integration_project_worker_spec.rb37
957 files changed, 31562 insertions, 11632 deletions
diff --git a/spec/bin/feature_flag_spec.rb b/spec/bin/feature_flag_spec.rb
index 41117880f95..185a03fc587 100644
--- a/spec/bin/feature_flag_spec.rb
+++ b/spec/bin/feature_flag_spec.rb
@@ -240,5 +240,18 @@ RSpec.describe 'bin/feature-flag' do
end
end
end
+
+ describe '.read_ee_only' do
+ where(:type, :is_ee_only) do
+ :development | false
+ :licensed | true
+ end
+
+ with_them do
+ let(:options) { OpenStruct.new(name: 'foo', type: type) }
+
+ it { expect(described_class.read_ee_only(options)).to eq(is_ee_only) }
+ end
+ end
end
end
diff --git a/spec/channels/application_cable/connection_spec.rb b/spec/channels/application_cable/connection_spec.rb
index e5f7ea1103c..7d60548f780 100644
--- a/spec/channels/application_cable/connection_spec.rb
+++ b/spec/channels/application_cable/connection_spec.rb
@@ -5,27 +5,39 @@ require 'spec_helper'
RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_shared_state do
let(:session_id) { Rack::Session::SessionId.new('6919a6f1bb119dd7396fadc38fd18d0d') }
- before do
- Gitlab::Redis::SharedState.with do |redis|
- redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
+ context 'when session cookie is set' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set("session:gitlab:#{session_id.private_id}", Marshal.dump(session_hash))
+ end
+
+ cookies[Gitlab::Application.config.session_options[:key]] = session_id.public_id
end
- cookies[Gitlab::Application.config.session_options[:key]] = session_id.public_id
- end
+ context 'when user is logged in' do
+ let(:user) { create(:user) }
+ let(:session_hash) { { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] } }
+
+ it 'sets current_user' do
+ connect
+
+ expect(connection.current_user).to eq(user)
+ end
- context 'when user is logged in' do
- let(:user) { create(:user) }
- let(:session_hash) { { 'warden.user.user.key' => [[user.id], user.encrypted_password[0, 29]] } }
+ context 'with a stale password' do
+ let(:partial_password_hash) { build(:user, password: 'some_old_password').encrypted_password[0, 29] }
+ let(:session_hash) { { 'warden.user.user.key' => [[user.id], partial_password_hash] } }
- it 'sets current_user' do
- connect
+ it 'sets current_user to nil' do
+ connect
- expect(connection.current_user).to eq(user)
+ expect(connection.current_user).to be_nil
+ end
+ end
end
- context 'with a stale password' do
- let(:partial_password_hash) { build(:user, password: 'some_old_password').encrypted_password[0, 29] }
- let(:session_hash) { { 'warden.user.user.key' => [[user.id], partial_password_hash] } }
+ context 'when user is not logged in' do
+ let(:session_hash) { {} }
it 'sets current_user to nil' do
connect
@@ -35,10 +47,18 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_shared_state do
end
end
- context 'when user is not logged in' do
- let(:session_hash) { {} }
+ context 'when session cookie is not set' do
+ it 'sets current_user to nil' do
+ connect
+
+ expect(connection.current_user).to be_nil
+ end
+ end
+ context 'when session cookie is an empty string' do
it 'sets current_user to nil' do
+ cookies[Gitlab::Application.config.session_options[:key]] = ''
+
connect
expect(connection.current_user).to be_nil
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 4f223811be8..f71f859a704 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -15,6 +15,37 @@ RSpec.describe Admin::ApplicationSettingsController do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
+ describe 'GET #integrations' do
+ before do
+ sign_in(admin)
+ end
+
+ context 'when GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?) { true }
+ end
+
+ it 'returns 404' do
+ get :integrations
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when not GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?) { false }
+ end
+
+ it 'renders correct template' do
+ get :integrations
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('admin/application_settings/integrations')
+ end
+ end
+ end
+
describe 'GET #usage_data with no access' do
before do
stub_usage_data_connections
@@ -56,6 +87,13 @@ RSpec.describe Admin::ApplicationSettingsController do
sign_in(admin)
end
+ it 'updates the require_admin_approval_after_user_signup setting' do
+ put :update, params: { application_setting: { require_admin_approval_after_user_signup: true } }
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ expect(ApplicationSetting.current.require_admin_approval_after_user_signup).to eq(true)
+ end
+
it 'updates the password_authentication_enabled_for_git setting' do
put :update, params: { application_setting: { password_authentication_enabled_for_git: "0" } }
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index d2a569a9d48..69bdc79c5f5 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -416,6 +416,7 @@ RSpec.describe Admin::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_platform_kubernetes_rbac
+ expect(cluster).to be_namespace_per_environment
end
end
end
@@ -585,6 +586,7 @@ RSpec.describe Admin::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
base_domain: domain
}
}
@@ -599,6 +601,7 @@ RSpec.describe Admin::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
expect(cluster.domain).to eq('test-domain.com')
end
@@ -624,6 +627,7 @@ RSpec.describe Admin::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
domain: domain
}
}
@@ -637,6 +641,7 @@ RSpec.describe Admin::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
end
end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 4b1806a43d2..1a13d016b73 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -20,6 +20,18 @@ RSpec.describe Admin::IntegrationsController do
end
end
end
+
+ context 'when GitLab.com' do
+ before do
+ allow(::Gitlab).to receive(:com?) { true }
+ end
+
+ it 'returns 404' do
+ get :edit, params: { id: Service.available_services_names.sample }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
describe '#update' do
@@ -43,7 +55,7 @@ RSpec.describe Admin::IntegrationsController do
end
it 'calls to PropagateIntegrationWorker' do
- expect(PropagateIntegrationWorker).to have_received(:perform_async).with(integration.id, false)
+ expect(PropagateIntegrationWorker).to have_received(:perform_async).with(integration.id)
end
end
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index 013eee19409..3fffc50475c 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -151,4 +151,21 @@ RSpec.describe Admin::RunnersController do
expect(runner.active).to eq(false)
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar' }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
diff --git a/spec/controllers/admin/sessions_controller_spec.rb b/spec/controllers/admin/sessions_controller_spec.rb
index 35982e57034..5fa7a7f278d 100644
--- a/spec/controllers/admin/sessions_controller_spec.rb
+++ b/spec/controllers/admin/sessions_controller_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Admin::SessionsController, :do_not_mock_admin_mode do
# triggering the auth form will request admin mode
get :new
- Timecop.freeze(Gitlab::Auth::CurrentUserMode::ADMIN_MODE_REQUESTED_GRACE_PERIOD.from_now) do
+ travel_to(Gitlab::Auth::CurrentUserMode::ADMIN_MODE_REQUESTED_GRACE_PERIOD.from_now) do
post :create, params: { user: { password: user.password } }
expect(response).to redirect_to(new_admin_session_path)
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index 6301da74f4a..0ee773f291c 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -23,6 +23,12 @@ RSpec.describe Admin::UsersController do
expect(assigns(:users)).to eq([admin])
end
+
+ it 'eager loads authorized projects association' do
+ get :index
+
+ expect(assigns(:users).first.association(:authorized_projects)).to be_loaded
+ end
end
describe 'GET :id' do
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 188a4cb04af..94e110325f9 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -416,13 +416,13 @@ RSpec.describe ApplicationController do
end
it 'returns false if the grace period has expired' do
- Timecop.freeze(3.hours.from_now) do
+ travel_to(3.hours.from_now) do
expect(subject).to be_falsey
end
end
it 'returns true if the grace period is still active' do
- Timecop.freeze(1.hour.from_now) do
+ travel_to(1.hour.from_now) do
expect(subject).to be_truthy
end
end
diff --git a/spec/controllers/boards/lists_controller_spec.rb b/spec/controllers/boards/lists_controller_spec.rb
index c72d9e5053a..9b09f46d17e 100644
--- a/spec/controllers/boards/lists_controller_spec.rb
+++ b/spec/controllers/boards/lists_controller_spec.rb
@@ -260,6 +260,17 @@ RSpec.describe Boards::ListsController do
end
end
+ context 'with an error service response' do
+ it 'returns an unprocessable entity response' do
+ allow(Boards::Lists::DestroyService).to receive(:new)
+ .and_return(double(execute: ServiceResponse.error(message: 'error')))
+
+ remove_board_list user: user, board: board, list: planning
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
+ end
+
def remove_board_list(user:, board:, list:)
sign_in(user)
diff --git a/spec/controllers/concerns/controller_with_feature_category/config_spec.rb b/spec/controllers/concerns/controller_with_feature_category/config_spec.rb
deleted file mode 100644
index 9b8ffd2baab..00000000000
--- a/spec/controllers/concerns/controller_with_feature_category/config_spec.rb
+++ /dev/null
@@ -1,53 +0,0 @@
-# frozen_string_literal: true
-
-require "fast_spec_helper"
-require "rspec-parameterized"
-require_relative "../../../../app/controllers/concerns/controller_with_feature_category/config"
-
-RSpec.describe ControllerWithFeatureCategory::Config do
- describe "#matches?" do
- using RSpec::Parameterized::TableSyntax
-
- where(:only_actions, :except_actions, :if_proc, :unless_proc, :test_action, :expected) do
- nil | nil | nil | nil | "action" | true
- [:included] | nil | nil | nil | "action" | false
- [:included] | nil | nil | nil | "included" | true
- nil | [:excluded] | nil | nil | "excluded" | false
- nil | nil | true | nil | "action" | true
- [:included] | nil | true | nil | "action" | false
- [:included] | nil | true | nil | "included" | true
- nil | [:excluded] | true | nil | "excluded" | false
- nil | nil | false | nil | "action" | false
- [:included] | nil | false | nil | "action" | false
- [:included] | nil | false | nil | "included" | false
- nil | [:excluded] | false | nil | "excluded" | false
- nil | nil | nil | true | "action" | false
- [:included] | nil | nil | true | "action" | false
- [:included] | nil | nil | true | "included" | false
- nil | [:excluded] | nil | true | "excluded" | false
- nil | nil | nil | false | "action" | true
- [:included] | nil | nil | false | "action" | false
- [:included] | nil | nil | false | "included" | true
- nil | [:excluded] | nil | false | "excluded" | false
- nil | nil | true | false | "action" | true
- [:included] | nil | true | false | "action" | false
- [:included] | nil | true | false | "included" | true
- nil | [:excluded] | true | false | "excluded" | false
- nil | nil | false | true | "action" | false
- [:included] | nil | false | true | "action" | false
- [:included] | nil | false | true | "included" | false
- nil | [:excluded] | false | true | "excluded" | false
- end
-
- with_them do
- let(:config) do
- if_to_proc = if_proc.nil? ? nil : -> (_) { if_proc }
- unless_to_proc = unless_proc.nil? ? nil : -> (_) { unless_proc }
-
- described_class.new(:category, only_actions, except_actions, if_to_proc, unless_to_proc)
- end
-
- specify { expect(config.matches?(test_action)).to be(expected) }
- end
- end
-end
diff --git a/spec/controllers/concerns/controller_with_feature_category_spec.rb b/spec/controllers/concerns/controller_with_feature_category_spec.rb
index e603a7d14c4..55e84755f5c 100644
--- a/spec/controllers/concerns/controller_with_feature_category_spec.rb
+++ b/spec/controllers/concerns/controller_with_feature_category_spec.rb
@@ -2,7 +2,6 @@
require 'fast_spec_helper'
require_relative "../../../app/controllers/concerns/controller_with_feature_category"
-require_relative "../../../app/controllers/concerns/controller_with_feature_category/config"
RSpec.describe ControllerWithFeatureCategory do
describe ".feature_category_for_action" do
@@ -14,17 +13,15 @@ RSpec.describe ControllerWithFeatureCategory do
let(:controller) do
Class.new(base_controller) do
- feature_category :baz
- feature_category :foo, except: %w(update edit)
- feature_category :bar, only: %w(index show)
- feature_category :quux, only: %w(destroy)
- feature_category :quuz, only: %w(destroy)
+ feature_category :foo, %w(update edit)
+ feature_category :bar, %w(index show)
+ feature_category :quux, %w(destroy)
end
end
let(:subclass) do
Class.new(controller) do
- feature_category :qux, only: %w(index)
+ feature_category :baz, %w(subclass_index)
end
end
@@ -33,34 +30,31 @@ RSpec.describe ControllerWithFeatureCategory do
end
it "returns the expected category", :aggregate_failures do
- expect(controller.feature_category_for_action("update")).to eq(:baz)
- expect(controller.feature_category_for_action("hello")).to eq(:foo)
+ expect(controller.feature_category_for_action("update")).to eq(:foo)
expect(controller.feature_category_for_action("index")).to eq(:bar)
+ expect(controller.feature_category_for_action("destroy")).to eq(:quux)
end
- it "returns the closest match for categories defined in subclasses" do
- expect(subclass.feature_category_for_action("index")).to eq(:qux)
- expect(subclass.feature_category_for_action("show")).to eq(:bar)
+ it "returns the expected category for categories defined in subclasses" do
+ expect(subclass.feature_category_for_action("subclass_index")).to eq(:baz)
end
- it "returns the last defined feature category when multiple match" do
- expect(controller.feature_category_for_action("destroy")).to eq(:quuz)
- end
-
- it "raises an error when using including and excluding the same action" do
+ it "raises an error when defining for the controller and for individual actions" do
expect do
Class.new(base_controller) do
- feature_category :hello, only: [:world], except: [:world]
+ feature_category :hello
+ feature_category :goodbye, [:world]
end
- end.to raise_error(%r(cannot configure both `only` and `except`))
+ end.to raise_error(ArgumentError, "hello is defined for all actions, but other categories are set")
end
- it "raises an error when using unknown arguments" do
+ it "raises an error when multiple calls define the same action" do
expect do
Class.new(base_controller) do
- feature_category :hello, hello: :world
+ feature_category :hello, [:world]
+ feature_category :goodbye, ["world"]
end
- end.to raise_error(%r(unknown arguments))
+ end.to raise_error(ArgumentError, "Actions have multiple feature categories: world")
end
end
end
diff --git a/spec/controllers/concerns/redis_tracking_spec.rb b/spec/controllers/concerns/redis_tracking_spec.rb
index 3795fca5576..1b41e1820b1 100644
--- a/spec/controllers/concerns/redis_tracking_spec.rb
+++ b/spec/controllers/concerns/redis_tracking_spec.rb
@@ -36,21 +36,9 @@ RSpec.describe RedisTracking do
end
end
- context 'with usage ping disabled' do
- it 'does not track the event' do
- stub_feature_flags(feature => true)
- allow(Gitlab::CurrentSettings).to receive(:usage_ping_enabled?).and_return(false)
-
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- get :index
- end
- end
-
- context 'with feature enabled and usage ping enabled' do
+ context 'with feature enabled' do
before do
stub_feature_flags(feature => true)
- allow(Gitlab::CurrentSettings).to receive(:usage_ping_enabled?).and_return(true)
end
context 'when user is logged in' do
diff --git a/spec/controllers/dashboard_controller_spec.rb b/spec/controllers/dashboard_controller_spec.rb
index c838affa239..9b78f841cce 100644
--- a/spec/controllers/dashboard_controller_spec.rb
+++ b/spec/controllers/dashboard_controller_spec.rb
@@ -15,6 +15,16 @@ RSpec.describe DashboardController do
describe 'GET issues' do
it_behaves_like 'issuables list meta-data', :issue, :issues
it_behaves_like 'issuables requiring filter', :issues
+
+ it 'lists only incidents and issues' do
+      issue = create(:issue, project: project, author: user)
+ incident = create(:incident, project: project, author: user)
+ create(:quality_test_case, project: project, author: user)
+
+ get :issues, params: { author_id: user.id }
+
+ expect(assigns(:issues)).to match_array([issue, incident])
+ end
end
describe 'GET merge requests' do
diff --git a/spec/controllers/every_controller_spec.rb b/spec/controllers/every_controller_spec.rb
index 4785ee9ed8f..d333c98ccf7 100644
--- a/spec/controllers/every_controller_spec.rb
+++ b/spec/controllers/every_controller_spec.rb
@@ -17,20 +17,27 @@ RSpec.describe "Every controller" do
.compact
.select { |route| route[:controller].present? && route[:action].present? }
.map { |route| [constantize_controller(route[:controller]), route[:action]] }
- .reject { |route| route.first.nil? || !route.first.include?(ControllerWithFeatureCategory) }
+ .select { |(controller, action)| controller&.include?(ControllerWithFeatureCategory) }
+ .reject { |(controller, action)| controller == Devise::UnlocksController }
end
let_it_be(:routes_without_category) do
controller_actions.map do |controller, action|
- "#{controller}##{action}" unless controller.feature_category_for_action(action)
+ next if controller.feature_category_for_action(action)
+ next unless controller.to_s.start_with?('B', 'C', 'D', 'E', 'F', 'Projects::MergeRequestsController')
+
+ "#{controller}##{action}"
end.compact
end
it "has feature categories" do
- pending("We'll work on defining categories for all controllers: "\
- "https://gitlab.com/gitlab-com/gl-infra/scalability/-/issues/463")
+ routes_without_category.map { |x| x.split('#') }.group_by(&:first).each do |controller, actions|
+ puts controller
+ puts actions.map { |x| ":#{x.last}" }.sort.join(', ')
+ puts ''
+ end
- expect(routes_without_category).to be_empty, "#{routes_without_category.first(10)} did not have a category"
+ expect(routes_without_category).to be_empty, "#{routes_without_category} did not have a category"
end
it "completed controllers don't get new routes without categories" do
@@ -74,9 +81,9 @@ RSpec.describe "Every controller" do
end
def actions_defined_in_feature_category_config(controller)
- feature_category_configs = controller.send(:class_attributes)[:feature_category_config]
- feature_category_configs.map do |config|
- Array(config.send(:only)) + Array(config.send(:except))
- end.flatten.uniq.map(&:to_s)
+ controller.send(:class_attributes)[:feature_category_config]
+ .values
+ .flatten
+ .map(&:to_s)
end
end
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 81d5bc7770f..140b7b0f2a8 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -271,6 +271,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster).to be_kubernetes
expect(cluster.provider_gcp).to be_legacy_abac
expect(cluster).to be_managed
+ expect(cluster).to be_namespace_per_environment
end
context 'when legacy_abac param is false' do
@@ -358,6 +359,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_managed
+ expect(cluster).to be_namespace_per_environment
end
end
@@ -387,6 +389,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_platform_kubernetes_rbac
+ expect(cluster).to be_namespace_per_environment
end
end
@@ -716,6 +719,7 @@ RSpec.describe Groups::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
domain: domain
}
}
@@ -729,6 +733,7 @@ RSpec.describe Groups::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
end
end
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index 07299382230..d179e268748 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -136,10 +136,15 @@ RSpec.describe Groups::GroupLinksController do
let(:expiry_date) { 1.month.from_now.to_date }
subject do
- post(:update, params: { group_id: shared_group,
- id: link.id,
- group_link: { group_access: Gitlab::Access::GUEST,
- expires_at: expiry_date } })
+ post(
+ :update,
+ params: {
+ group_id: shared_group,
+ id: link.id,
+ group_link: { group_access: Gitlab::Access::GUEST, expires_at: expiry_date }
+ },
+ format: :json
+ )
end
context 'when user has admin access to the shared group' do
@@ -160,6 +165,26 @@ RSpec.describe Groups::GroupLinksController do
expect(link.expires_at).to eq(expiry_date)
end
+ context 'when `expires_at` is set' do
+ it 'returns correct json response' do
+ travel_to Time.now.utc.beginning_of_day
+
+ subject
+
+ expect(json_response).to eq({ "expires_in" => "about 1 month", "expires_soon" => false })
+ end
+ end
+
+ context 'when `expires_at` is not set' do
+ let(:expiry_date) { nil }
+
+ it 'returns empty json response' do
+ subject
+
+ expect(json_response).to be_empty
+ end
+ end
+
it 'updates project permissions' do
expect { subject }.to change { group_member.can?(:create_release, project) }.from(true).to(false)
end
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index 20ee19b01d1..33041f1af9f 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe Groups::LabelsController do
before do
group.add_owner(user)
+    # feature flags are enabled by default in specs, so turn this one off
+ stub_feature_flags(show_inherited_labels: false)
sign_in(user)
end
@@ -32,11 +34,41 @@ RSpec.describe Groups::LabelsController do
subgroup.add_owner(user)
end
- it 'returns ancestor group labels' do
- get :index, params: { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true }, format: :json
+ RSpec.shared_examples 'returns ancestor group labels' do
+ it 'returns ancestor group labels' do
+ get :index, params: params, format: :json
- label_ids = json_response.map {|label| label['title']}
- expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
+ label_ids = json_response.map {|label| label['title']}
+ expect(label_ids).to match_array([group_label_1.title, subgroup_label_1.title])
+ end
+ end
+
+ context 'when include_ancestor_groups true' do
+ let(:params) { { group_id: subgroup, include_ancestor_groups: true, only_group_labels: true } }
+
+ it_behaves_like 'returns ancestor group labels'
+ end
+
+ context 'when include_ancestor_groups false' do
+ let(:params) { { group_id: subgroup, only_group_labels: true } }
+
+ it 'does not return ancestor group labels', :aggregate_failures do
+ get :index, params: params, format: :json
+
+ label_ids = json_response.map {|label| label['title']}
+ expect(label_ids).to match_array([subgroup_label_1.title])
+          expect(label_ids).not_to include(group_label_1.title)
+ end
+ end
+
+ context 'when show_inherited_labels enabled' do
+ let(:params) { { group_id: subgroup } }
+
+ before do
+ stub_feature_flags(show_inherited_labels: true)
+ end
+
+ it_behaves_like 'returns ancestor group labels'
end
end
@@ -56,4 +88,43 @@ RSpec.describe Groups::LabelsController do
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ describe 'DELETE #destroy' do
+ context 'when current user has ability to destroy the label' do
+ before do
+ sign_in(user)
+ end
+
+ it 'removes the label' do
+ label = create(:group_label, group: group)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect { label.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+      context 'when label is successfully destroyed' do
+ it 'redirects to the group labels page' do
+ label = create(:group_label, group: group)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect(response).to redirect_to(group_labels_path)
+ end
+ end
+ end
+
+ context 'when current_user does not have ability to destroy the label' do
+ let(:another_user) { create(:user) }
+
+ before do
+ sign_in(another_user)
+ end
+
+ it 'responds with status 404' do
+ label = create(:group_label, group: group)
+ delete :destroy, params: { group_id: group.to_param, id: label.to_param }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 5c7b88a218a..2c85fe482e2 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Groups::MilestonesController do
let(:user) { create(:user) }
let(:title) { '肯定不是中文的问题' }
let(:milestone) { create(:milestone, project: project) }
- let(:milestone_path) { group_milestone_path(group, milestone.safe_title, title: milestone.title) }
let(:milestone_params) do
{
@@ -25,6 +24,12 @@ RSpec.describe Groups::MilestonesController do
project.add_maintainer(user)
end
+ it_behaves_like 'milestone tabs' do
+ let(:milestone) { create(:milestone, group: group) }
+ let(:milestone_path) { group_milestone_path(group, milestone.iid) }
+ let(:request_params) { { group_id: group, id: milestone.iid } }
+ end
+
describe '#index' do
describe 'as HTML' do
render_views
diff --git a/spec/controllers/groups/registry/repositories_controller_spec.rb b/spec/controllers/groups/registry/repositories_controller_spec.rb
index ddac8fc5002..ae982b02a4f 100644
--- a/spec/controllers/groups/registry/repositories_controller_spec.rb
+++ b/spec/controllers/groups/registry/repositories_controller_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Groups::Registry::RepositoriesController do
it_behaves_like 'with name parameter'
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
context 'with project in subgroup' do
let_it_be(:test_group) { create(:group, parent: group ) }
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index f11bb66caab..880d5fe8951 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -225,4 +225,25 @@ RSpec.describe Groups::Settings::CiCdController do
end
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', group_id: group }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', group_id: group }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index 35d8c0b7c6d..90f348a3421 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -2,18 +2,18 @@
require 'spec_helper'
-RSpec.describe GroupsController do
+RSpec.describe GroupsController, factory_default: :keep do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
- let(:admin) { create(:admin) }
- let(:group) { create(:group, :public) }
- let(:project) { create(:project, namespace: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let!(:owner) { group.add_owner(create(:user)).user }
- let!(:maintainer) { group.add_maintainer(create(:user)).user }
- let!(:developer) { group.add_developer(create(:user)).user }
- let!(:guest) { group.add_guest(create(:user)).user }
+ let_it_be_with_refind(:group) { create_default(:group, :public) }
+ let_it_be_with_refind(:project) { create(:project, namespace: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:group_member) { create(:group_member, group: group, user: user) }
+ let_it_be(:owner) { group.add_owner(create(:user)).user }
+ let_it_be(:maintainer) { group.add_maintainer(create(:user)).user }
+ let_it_be(:developer) { group.add_developer(create(:user)).user }
+ let_it_be(:guest) { group.add_guest(create(:user)).user }
shared_examples 'member with ability to create subgroups' do
it 'renders the new page' do
@@ -57,7 +57,6 @@ RSpec.describe GroupsController do
describe 'GET #show' do
before do
sign_in(user)
- project
end
let(:format) { :html }
@@ -82,7 +81,6 @@ RSpec.describe GroupsController do
describe 'GET #details' do
before do
sign_in(user)
- project
end
let(:format) { :html }
@@ -131,12 +129,9 @@ RSpec.describe GroupsController do
end
describe 'GET #activity' do
- render_views
-
context 'as json' do
before do
sign_in(user)
- project
end
it 'includes events from all projects in group and subgroups', :sidekiq_might_not_need_inline do
@@ -157,10 +152,6 @@ RSpec.describe GroupsController do
end
context 'when user has no permission to see the event' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project) { create(:project, group: group) }
-
let(:project_with_restricted_access) do
create(:project, :public, issues_access_level: ProjectFeature::PRIVATE, group: group)
end
@@ -398,8 +389,8 @@ RSpec.describe GroupsController do
end
describe 'GET #issues', :sidekiq_might_not_need_inline do
- let(:issue_1) { create(:issue, project: project, title: 'foo') }
- let(:issue_2) { create(:issue, project: project, title: 'bar') }
+ let_it_be(:issue_1) { create(:issue, project: project, title: 'foo') }
+ let_it_be(:issue_2) { create(:issue, project: project, title: 'bar') }
before do
create_list(:award_emoji, 3, awardable: issue_2)
@@ -409,6 +400,15 @@ RSpec.describe GroupsController do
sign_in(user)
end
+ it 'lists only incidents and issues' do
+ incident = create(:incident, project: project)
+ create(:quality_test_case, project: project)
+
+ get :issues, params: { id: group.to_param }
+
+ expect(assigns(:issues)).to match_array([issue_1, issue_2, incident])
+ end
+
context 'sorting by votes' do
it 'sorts most popular issues' do
get :issues, params: { id: group.to_param, sort: 'upvotes_desc' }
@@ -552,8 +552,6 @@ RSpec.describe GroupsController do
end
context 'when there is a conflicting group path' do
- render_views
-
let!(:conflict_group) { create(:group, path: SecureRandom.hex(12) ) }
let!(:old_name) { group.name }
@@ -794,6 +792,7 @@ RSpec.describe GroupsController do
context 'when transferring to a subgroup goes right' do
let(:new_parent_group) { create(:group, :public) }
+ let(:group) { create(:group, :public) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let!(:new_parent_group_member) { create(:group_member, :owner, group: new_parent_group, user: user) }
@@ -805,11 +804,8 @@ RSpec.describe GroupsController do
}
end
- it 'returns a notice' do
+ it 'returns a notice and redirects to the new path' do
expect(flash[:notice]).to eq("Group '#{group.name}' was successfully transferred.")
- end
-
- it 'redirects to the new path' do
expect(response).to redirect_to("/#{new_parent_group.path}/#{group.path}")
end
end
@@ -826,17 +822,15 @@ RSpec.describe GroupsController do
}
end
- it 'returns a notice' do
+ it 'returns a notice and redirects to the new path' do
expect(flash[:notice]).to eq("Group '#{group.name}' was successfully transferred.")
- end
-
- it 'redirects to the new path' do
expect(response).to redirect_to("/#{group.path}")
end
end
context 'When the transfer goes wrong' do
let(:new_parent_group) { create(:group, :public) }
+ let(:group) { create(:group, :public) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let!(:new_parent_group_member) { create(:group_member, :owner, group: new_parent_group, user: user) }
@@ -850,17 +844,15 @@ RSpec.describe GroupsController do
}
end
- it 'returns an alert' do
+ it 'returns an alert and redirects to the current path' do
expect(flash[:alert]).to eq "Transfer failed: namespace directory cannot be moved"
- end
-
- it 'redirects to the current path' do
expect(response).to redirect_to(edit_group_path(group))
end
end
context 'when the user is not allowed to transfer the group' do
let(:new_parent_group) { create(:group, :public) }
+ let(:group) { create(:group, :public) }
let!(:group_member) { create(:group_member, :guest, group: group, user: user) }
let!(:new_parent_group_member) { create(:group_member, :guest, group: new_parent_group, user: user) }
@@ -879,6 +871,7 @@ RSpec.describe GroupsController do
context 'transferring when a project has container images' do
let(:group) { create(:group, :public, :nested) }
+ let(:project) { create(:project, namespace: group) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
before do
@@ -979,6 +972,8 @@ RSpec.describe GroupsController do
end
context 'when there is no file available to download' do
+ let(:admin) { create(:admin) }
+
before do
sign_in(admin)
end
@@ -1149,9 +1144,7 @@ RSpec.describe GroupsController do
describe "GET #activity as JSON" do
include DesignManagementTestHelpers
- render_views
- let(:project) { create(:project, :public, group: group) }
let(:other_project) { create(:project, :public, group: group) }
def get_activity
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
new file mode 100644
index 00000000000..9fe15162158
--- /dev/null
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::BulkImportsController do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when user is signed in' do
+ context 'when bulk_import feature flag is enabled' do
+ before do
+ stub_feature_flags(bulk_import: true)
+ end
+
+ describe 'POST configure' do
+ context 'when no params are passed in' do
+ it 'clears out existing session' do
+ post :configure
+
+ expect(session[:bulk_import_gitlab_access_token]).to be_nil
+ expect(session[:bulk_import_gitlab_url]).to be_nil
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(status_import_bulk_import_url)
+ end
+ end
+
+ it 'sets the session variables' do
+ token = 'token'
+ url = 'https://gitlab.example'
+
+ post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url }
+
+ expect(session[:bulk_import_gitlab_access_token]).to eq(token)
+ expect(session[:bulk_import_gitlab_url]).to eq(url)
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(status_import_bulk_import_url)
+ end
+
+ it 'strips access token with spaces' do
+ token = 'token'
+
+ post :configure, params: { bulk_import_gitlab_access_token: " #{token} " }
+
+ expect(session[:bulk_import_gitlab_access_token]).to eq(token)
+ expect(controller).to redirect_to(status_import_bulk_import_url)
+ end
+ end
+
+ describe 'GET status' do
+ context 'when host url is local or not http' do
+ %w[https://localhost:3000 http://192.168.0.1 ftp://testing].each do |url|
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
+
+ session[:bulk_import_gitlab_access_token] = 'test'
+ session[:bulk_import_gitlab_url] = url
+ end
+
+ it 'denies network request' do
+ get :status
+
+ expect(controller).to redirect_to(new_group_path)
+ expect(flash[:alert]).to eq('Specified URL cannot be used: "Only allowed schemes are http, https"')
+ end
+ end
+
+ context 'when local requests are allowed' do
+ %w[https://localhost:3000 http://192.168.0.1].each do |url|
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+
+ session[:bulk_import_gitlab_access_token] = 'test'
+ session[:bulk_import_gitlab_url] = url
+ end
+
+ it 'allows network request' do
+ get :status
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+ end
+ end
+ end
+
+  context 'when bulk_import feature flag is disabled' do
+ before do
+ stub_feature_flags(bulk_import: false)
+ end
+
+ context 'POST configure' do
+ it 'returns 404' do
+ post :configure
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'GET status' do
+ it 'returns 404' do
+ get :status
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ context 'when user is signed out' do
+ before do
+ sign_out(user)
+ end
+
+ context 'POST configure' do
+ it 'redirects to sign in page' do
+ post :configure
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'GET status' do
+ it 'redirects to sign in page' do
+ get :status
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/import/manifest_controller_spec.rb b/spec/controllers/import/manifest_controller_spec.rb
index ec8bd45b65c..6b21b45e698 100644
--- a/spec/controllers/import/manifest_controller_spec.rb
+++ b/spec/controllers/import/manifest_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Import::ManifestController do
+RSpec.describe Import::ManifestController, :clean_gitlab_redis_shared_state do
include ImportSpecHelper
let_it_be(:user) { create(:user) }
@@ -16,42 +16,93 @@ RSpec.describe Import::ManifestController do
sign_in(user)
end
- def assign_session_group
- session[:manifest_import_repositories] = []
- session[:manifest_import_group_id] = group.id
+ describe 'POST upload' do
+ context 'with a valid manifest' do
+ it 'saves the manifest and redirects to the status page', :aggregate_failures do
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
+ }
+
+ metadata = Gitlab::ManifestImport::Metadata.new(user)
+
+ expect(metadata.group_id).to eq(group.id)
+ expect(metadata.repositories.size).to eq(660)
+ expect(metadata.repositories.first).to include(name: 'platform/build', path: 'build/make')
+
+ expect(response).to redirect_to(status_import_manifest_path)
+ end
+ end
+
+ context 'with an invalid manifest' do
+ it 'displays an error' do
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/invalid_manifest.xml')
+ }
+
+ expect(assigns(:errors)).to be_present
+ end
+ end
+
+ context 'when the user cannot create projects in the group' do
+ it 'displays an error' do
+ sign_in(create(:user))
+
+ post :upload, params: {
+ group_id: group.id,
+ manifest: fixture_file_upload('spec/fixtures/aosp_manifest.xml')
+ }
+
+ expect(assigns(:errors)).to be_present
+ end
+ end
end
describe 'GET status' do
- let(:repo1) { OpenStruct.new(id: 'test1', url: 'http://demo.host/test1') }
- let(:repo2) { OpenStruct.new(id: 'test2', url: 'http://demo.host/test2') }
+ let(:repo1) { { id: 'test1', url: 'http://demo.host/test1' } }
+ let(:repo2) { { id: 'test2', url: 'http://demo.host/test2' } }
let(:repos) { [repo1, repo2] }
- before do
- assign_session_group
+ shared_examples 'status action' do
+ it "returns variables for json request" do
+ project = create(:project, import_type: 'manifest', creator_id: user.id)
- session[:manifest_import_repositories] = repos
- end
+ get :status, format: :json
- it "returns variables for json request" do
- project = create(:project, import_type: 'manifest', creator_id: user.id)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
+ expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1[:id])
+ expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2[:id])
+ expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
+ end
- get :status, format: :json
+ it "does not show already added project" do
+ project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1[:url])
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
- expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1.id)
- expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2.id)
- expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
+ get :status, format: :json
+
+ expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
+ expect(json_response.dig("provider_repos").length).to eq(1)
+ expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1[:id])
+ end
end
- it "does not show already added project" do
- project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1.url)
+ context 'when the data is stored via Gitlab::ManifestImport::Metadata' do
+ before do
+ Gitlab::ManifestImport::Metadata.new(user).save(repos, group.id)
+ end
+
+ include_examples 'status action'
+ end
- get :status, format: :json
+ context 'when the data is stored in the user session' do
+ before do
+ session[:manifest_import_repositories] = repos
+ session[:manifest_import_group_id] = group.id
+ end
- expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
- expect(json_response.dig("provider_repos").length).to eq(1)
- expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1.id)
+ include_examples 'status action'
end
end
end
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index a083cfac981..75a972d2f95 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -17,8 +17,53 @@ RSpec.describe InvitesController, :snowplow do
}
end
- before do
- controller.instance_variable_set(:@member, member)
+ shared_examples 'invalid token' do
+ context 'when invite token is not valid' do
+ let(:params) { { id: '_bogus_token_' } }
+
+ it 'renders the 404 page' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ shared_examples "tracks the 'accepted' event for the invitation reminders experiment" do
+ before do
+ stub_experiment(invitation_reminders: true)
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).with(:invitation_reminders, member.invite_email).and_return(experimental_group)
+ end
+
+ context 'when in the control group' do
+ let(:experimental_group) { false }
+
+ it "tracks the 'accepted' event" do
+ request
+
+ expect_snowplow_event(
+ category: 'Growth::Acquisition::Experiment::InvitationReminders',
+ label: md5_member_global_id,
+ property: 'control_group',
+ action: 'accepted'
+ )
+ end
+ end
+
+ context 'when in the experimental group' do
+ let(:experimental_group) { true }
+
+ it "tracks the 'accepted' event" do
+ request
+
+ expect_snowplow_event(
+ category: 'Growth::Acquisition::Experiment::InvitationReminders',
+ label: md5_member_global_id,
+ property: 'experimental_group',
+ action: 'accepted'
+ )
+ end
+ end
end
describe 'GET #show' do
@@ -39,7 +84,7 @@ RSpec.describe InvitesController, :snowplow do
end
it 'forces re-confirmation if email does not match signed in user' do
- member.invite_email = 'bogus@email.com'
+ member.update!(invite_email: 'bogus@email.com')
expect do
request
@@ -64,8 +109,8 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as experiment group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'opened'))
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'opened'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
@@ -76,10 +121,13 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as control group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'opened'))
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'opened'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
+
+ it_behaves_like "tracks the 'accepted' event for the invitation reminders experiment"
+ it_behaves_like 'invalid token'
end
context 'when not logged in' do
@@ -125,7 +173,7 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as experiment group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
@@ -136,8 +184,31 @@ RSpec.describe InvitesController, :snowplow do
it 'tracks the user as control group' do
request
- expect_snowplow_event(snowplow_event.merge(action: 'accepted'))
+ expect_snowplow_event(**snowplow_event.merge(action: 'accepted'))
end
end
+
+ it_behaves_like "tracks the 'accepted' event for the invitation reminders experiment"
+ it_behaves_like 'invalid token'
+ end
+
+ describe 'POST #decline for link in UI' do
+ before do
+ sign_in(user)
+ end
+
+ subject(:request) { post :decline, params: params }
+
+ it_behaves_like 'invalid token'
+ end
+
+ describe 'GET #decline for link in email' do
+ before do
+ sign_in(user)
+ end
+
+ subject(:request) { get :decline, params: params }
+
+ it_behaves_like 'invalid token'
end
end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 51a451570c5..52cd6869b04 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -251,6 +251,7 @@ RSpec.describe Projects::ClustersController do
cluster: {
name: 'new-cluster',
managed: '1',
+ namespace_per_environment: '0',
provider_gcp_attributes: {
gcp_project_id: 'gcp-project-12345',
legacy_abac: legacy_abac_param
@@ -278,6 +279,7 @@ RSpec.describe Projects::ClustersController do
expect(project.clusters.first).to be_kubernetes
expect(project.clusters.first.provider_gcp).to be_legacy_abac
expect(project.clusters.first.managed?).to be_truthy
+ expect(project.clusters.first.namespace_per_environment?).to be_falsy
end
context 'when legacy_abac param is false' do
@@ -369,6 +371,7 @@ RSpec.describe Projects::ClustersController do
expect(project.clusters.first).to be_user
expect(project.clusters.first).to be_kubernetes
+ expect(project.clusters.first).to be_namespace_per_environment
end
end
@@ -400,6 +403,7 @@ RSpec.describe Projects::ClustersController do
expect(cluster).to be_user
expect(cluster).to be_kubernetes
expect(cluster).to be_platform_kubernetes_rbac
+ expect(cluster).to be_namespace_per_environment
end
end
@@ -726,6 +730,7 @@ RSpec.describe Projects::ClustersController do
enabled: false,
name: 'my-new-cluster-name',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: {
namespace: 'my-namespace'
}
@@ -742,6 +747,7 @@ RSpec.describe Projects::ClustersController do
expect(cluster.enabled).to be_falsey
expect(cluster.name).to eq('my-new-cluster-name')
expect(cluster).not_to be_managed
+ expect(cluster).not_to be_namespace_per_environment
expect(cluster.platform_kubernetes.namespace).to eq('my-namespace')
end
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index 762ef795f6e..3baadde46dc 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe Projects::GroupLinksController do
- let(:group) { create(:group, :private) }
- let(:group2) { create(:group, :private) }
- let(:project) { create(:project, :private, group: group2) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:group2) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :private, group: group2) }
+ let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
@@ -142,4 +142,47 @@ RSpec.describe Projects::GroupLinksController do
end
end
end
+
+ describe '#update' do
+ let_it_be(:link) do
+ create(
+ :project_group_link,
+ {
+ project: project,
+ group: group
+ }
+ )
+ end
+
+ let(:expiry_date) { 1.month.from_now.to_date }
+
+ before do
+ travel_to Time.now.utc.beginning_of_day
+
+ put(
+ :update,
+ params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project.to_param,
+ id: link.id,
+ group_link: { group_access: Gitlab::Access::GUEST, expires_at: expiry_date }
+ },
+ format: :json
+ )
+ end
+
+ context 'when `expires_at` is set' do
+ it 'returns correct json response' do
+ expect(json_response).to eq({ "expires_in" => "about 1 month", "expires_soon" => false })
+ end
+ end
+
+ context 'when `expires_at` is not set' do
+ let(:expiry_date) { nil }
+
+ it 'returns empty json response' do
+ expect(json_response).to be_empty
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
index b82735a56b3..37a7fce0c23 100644
--- a/spec/controllers/projects/import/jira_controller_spec.rb
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -12,7 +12,6 @@ RSpec.describe Projects::Import::JiraController do
def ensure_correct_config
sign_in(user)
project.add_maintainer(user)
- stub_feature_flags(jira_issue_import: true)
stub_jira_service_test
end
@@ -77,7 +76,6 @@ RSpec.describe Projects::Import::JiraController do
before do
sign_in(user)
project.add_maintainer(user)
- stub_feature_flags(jira_issue_import: true)
end
context 'when Jira service is not enabled for the project' do
diff --git a/spec/controllers/projects/incidents_controller_spec.rb b/spec/controllers/projects/incidents_controller_spec.rb
index 2baae0661cb..1b47f9f6abf 100644
--- a/spec/controllers/projects/incidents_controller_spec.rb
+++ b/spec/controllers/projects/incidents_controller_spec.rb
@@ -3,44 +3,127 @@
require 'spec_helper'
RSpec.describe Projects::IncidentsController do
- let_it_be(:project) { create(:project) }
+ let_it_be_with_refind(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
+ let_it_be(:anonymous) { nil }
before_all do
- project.add_developer(developer)
project.add_guest(guest)
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(user) if user
+ end
+
+ subject { make_request }
+
+ shared_examples 'not found' do
+ include_examples 'returning response status', :not_found
+ end
+
+ shared_examples 'login required' do
+ it 'redirects to the login page' do
+ subject
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
end
describe 'GET #index' do
def make_request
- get :index, params: { namespace_id: project.namespace, project_id: project }
+ get :index, params: project_params
end
- it 'shows the page for user with developer role' do
- sign_in(developer)
- make_request
+ let(:user) { developer }
+
+ it 'shows the page' do
+ subject
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:index)
end
context 'when user is unauthorized' do
- it 'redirects to the login page' do
- sign_out(developer)
- make_request
+ let(:user) { anonymous }
+
+ it_behaves_like 'login required'
+ end
+
+ context 'when user is a guest' do
+ let(:user) { guest }
+
+ it 'shows the page' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+ end
+ end
+
+ describe 'GET #show' do
+ def make_request
+ get :show, params: project_params(id: resource)
+ end
+
+ let_it_be(:resource) { create(:incident, project: project) }
+ let(:user) { developer }
+
+ it 'renders incident page' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
- expect(response).to redirect_to(new_user_session_path)
+ expect(assigns(:incident)).to be_present
+ expect(assigns(:incident).author.association(:status)).to be_loaded
+ expect(assigns(:issue)).to be_present
+ expect(assigns(:noteable)).to eq(assigns(:incident))
+ end
+
+ context 'with feature flag disabled' do
+ before do
+ stub_feature_flags(issues_incident_details: false)
end
+
+ it_behaves_like 'not found'
+ end
+
+ context 'with non existing id' do
+ let(:resource) { non_existing_record_id }
+
+ it_behaves_like 'not found'
+ end
+
+ context 'for issue' do
+ let_it_be(:resource) { create(:issue, project: project) }
+
+ it_behaves_like 'not found'
end
context 'when user is a guest' do
- it 'shows 404' do
- sign_in(guest)
- make_request
+ let(:user) { guest }
+
+ it 'shows the page' do
+ subject
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:show)
end
end
+
+ context 'when unauthorized' do
+ let(:user) { anonymous }
+
+ it_behaves_like 'login required'
+ end
+ end
+
+ private
+
+ def project_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace, project_id: project)
end
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 94cce1964ca..d7c22d46e90 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -121,13 +121,6 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:build).id).to eq(job.id)
end
-
- it 'has the correct build collection' do
- builds = assigns(:builds).map(&:id)
-
- expect(builds).to include(job.id, second_job.id)
- expect(builds).not_to include(third_job.id)
- end
end
context 'when job does not exist' do
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index f213d104747..8a3c55033cb 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Projects::LabelsController do
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project, reload: true) { create(:project, namespace: group) }
+ let_it_be(:user) { create(:user) }
before do
project.add_maintainer(user)
@@ -14,16 +14,21 @@ RSpec.describe Projects::LabelsController do
end
describe 'GET #index' do
- let!(:label_1) { create(:label, project: project, priority: 1, title: 'Label 1') }
- let!(:label_2) { create(:label, project: project, priority: 3, title: 'Label 2') }
- let!(:label_3) { create(:label, project: project, priority: 1, title: 'Label 3') }
- let!(:label_4) { create(:label, project: project, title: 'Label 4') }
- let!(:label_5) { create(:label, project: project, title: 'Label 5') }
-
- let!(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1') }
- let!(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
- let!(:group_label_3) { create(:group_label, group: group, title: 'Group Label 3') }
- let!(:group_label_4) { create(:group_label, group: group, title: 'Group Label 4') }
+ let_it_be(:label_1) { create(:label, project: project, priority: 1, title: 'Label 1') }
+ let_it_be(:label_2) { create(:label, project: project, priority: 3, title: 'Label 2') }
+ let_it_be(:label_3) { create(:label, project: project, priority: 1, title: 'Label 3') }
+ let_it_be(:label_4) { create(:label, project: project, title: 'Label 4') }
+ let_it_be(:label_5) { create(:label, project: project, title: 'Label 5') }
+
+ let_it_be(:group_label_1) { create(:group_label, group: group, title: 'Group Label 1') }
+ let_it_be(:group_label_2) { create(:group_label, group: group, title: 'Group Label 2') }
+ let_it_be(:group_label_3) { create(:group_label, group: group, title: 'Group Label 3') }
+ let_it_be(:group_label_4) { create(:group_label, group: group, title: 'Group Label 4') }
+
+ let_it_be(:group_labels) { [group_label_3, group_label_4] }
+ let_it_be(:project_labels) { [label_4, label_5] }
+ let_it_be(:group_priority_labels) { [group_label_1, group_label_2] }
+ let_it_be(:project_priority_labels) { [label_1, label_2, label_3] }
before do
create(:label_priority, project: project, label: group_label_1, priority: 3)
@@ -68,6 +73,60 @@ RSpec.describe Projects::LabelsController do
end
end
+ context 'with subgroups' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup_label_1) { create(:group_label, group: subgroup, title: 'subgroup_label_1') }
+ let_it_be(:subgroup_label_2) { create(:group_label, group: subgroup, title: 'subgroup_label_2') }
+
+ before do
+ project.update!(namespace: subgroup)
+ subgroup.add_owner(user)
+ create(:label_priority, project: project, label: subgroup_label_2, priority: 1)
+ end
+
+ RSpec.shared_examples 'returns ancestor group labels' do
+ it 'returns ancestor group labels', :aggregate_failures do
+ get :index, params: params
+
+ expect(assigns(:labels)).to match_array([subgroup_label_1] + group_labels + project_labels)
+ expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + group_priority_labels + project_priority_labels)
+ end
+ end
+
+ context 'when show_inherited_labels disabled' do
+ before do
+ stub_feature_flags(show_inherited_labels: false)
+ end
+
+ context 'when include_ancestor_groups false' do
+ let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
+
+ it 'does not return ancestor group labels', :aggregate_failures do
+ get :index, params: params
+
+ expect(assigns(:labels)).to match_array([subgroup_label_1] + project_labels)
+ expect(assigns(:prioritized_labels)).to match_array([subgroup_label_2] + project_priority_labels)
+ end
+ end
+
+ context 'when include_ancestor_groups true' do
+ let(:params) { { namespace_id: project.namespace.to_param, project_id: project, include_ancestor_groups: true } }
+
+ it_behaves_like 'returns ancestor group labels'
+ end
+ end
+
+ context 'when show_inherited_labels enabled' do
+ let(:params) { { namespace_id: project.namespace.to_param, project_id: project } }
+
+ before do
+ stub_feature_flags(show_inherited_labels: true)
+ end
+
+ it_behaves_like 'returns ancestor group labels'
+ end
+ end
+
def list_labels
get :index, params: { namespace_id: project.namespace.to_param, project_id: project }
end
@@ -75,7 +134,7 @@ RSpec.describe Projects::LabelsController do
describe 'POST #generate' do
context 'personal project' do
- let(:personal_project) { create(:project, namespace: user.namespace) }
+ let_it_be(:personal_project) { create(:project, namespace: user.namespace) }
it 'creates labels' do
post :generate, params: { namespace_id: personal_project.namespace.to_param, project_id: personal_project }
@@ -116,8 +175,8 @@ RSpec.describe Projects::LabelsController do
end
describe 'POST #promote' do
- let!(:promoted_label_name) { "Promoted Label" }
- let!(:label_1) { create(:label, title: promoted_label_name, project: project) }
+ let_it_be(:promoted_label_name) { "Promoted Label" }
+ let_it_be(:label_1) { create(:label, title: promoted_label_name, project: project) }
context 'not group reporters' do
it 'denies access' do
@@ -196,7 +255,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
- let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
it 'redirects to the canonical path' do
get :index, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
@@ -242,7 +301,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
- let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
it 'returns not found' do
post :generate, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index fa32d32f552..9e5d41b1075 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -17,7 +17,9 @@ RSpec.describe Projects::MilestonesController do
controller.instance_variable_set(:@project, project)
end
- it_behaves_like 'milestone tabs'
+ it_behaves_like 'milestone tabs' do
+ let(:request_params) { { namespace_id: project.namespace, project_id: project, id: milestone.iid } }
+ end
describe "#show" do
render_views
diff --git a/spec/controllers/projects/registry/tags_controller_spec.rb b/spec/controllers/projects/registry/tags_controller_spec.rb
index 6adee35b60a..59df9e78a3c 100644
--- a/spec/controllers/projects/registry/tags_controller_spec.rb
+++ b/spec/controllers/projects/registry/tags_controller_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Projects::Registry::TagsController do
it 'tracks the event' do
expect_delete_tags(%w[test.])
- expect(controller).to receive(:track_event).with(:delete_tag)
+ expect(controller).to receive(:track_event).with(:delete_tag, {})
destroy_tag('test.')
end
diff --git a/spec/controllers/projects/releases/evidences_controller_spec.rb b/spec/controllers/projects/releases/evidences_controller_spec.rb
index d5a9665d6a5..0ec4cdf2a31 100644
--- a/spec/controllers/projects/releases/evidences_controller_spec.rb
+++ b/spec/controllers/projects/releases/evidences_controller_spec.rb
@@ -113,18 +113,6 @@ RSpec.describe Projects::Releases::EvidencesController do
it_behaves_like 'does not show the issue in evidence'
- context 'when the issue is confidential' do
- let(:issue) { create(:issue, :confidential, project: project) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
- context 'when the user is the author of the confidential issue' do
- let(:issue) { create(:issue, :confidential, project: project, author: user) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
context 'when project is private' do
let(:project) { create(:project, :repository, :private) }
@@ -143,32 +131,16 @@ RSpec.describe Projects::Releases::EvidencesController do
it_behaves_like 'does not show the issue in evidence'
- context 'when the issue is confidential' do
- let(:issue) { create(:issue, :confidential, project: project) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
- context 'when the user is the author of the confidential issue' do
- let(:issue) { create(:issue, :confidential, project: project, author: user) }
-
- it_behaves_like 'does not show the issue in evidence'
- end
-
context 'when project is private' do
let(:project) { create(:project, :repository, :private) }
- it 'returns evidence ' do
- subject
-
- expect(json_response).to eq(evidence.summary)
- end
+ it_behaves_like 'does not show the issue in evidence'
end
context 'when project restricts the visibility of issues to project members only' do
let(:project) { create(:project, :repository, :issues_private) }
- it_behaves_like 'evidence not found'
+ it_behaves_like 'does not show the issue in evidence'
end
end
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index 66f20bd50c4..2443a823070 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -73,4 +73,45 @@ RSpec.describe Projects::RunnersController do
expect(runner.active).to eq(false)
end
end
+
+ describe '#toggle_shared_runners' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ it 'toggles shared_runners_enabled when the group allows shared runners' do
+ project.update!(shared_runners_enabled: true)
+
+ post :toggle_shared_runners, params: params
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(project.shared_runners_enabled).to eq(false)
+ end
+
+ it 'toggles shared_runners_enabled when the group disallows shared runners but allows overrides' do
+ group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true)
+ project.update!(shared_runners_enabled: false)
+
+ post :toggle_shared_runners, params: params
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(project.shared_runners_enabled).to eq(true)
+ end
+
+ it 'does not enable if the group disallows shared runners' do
+ group.update!(shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false)
+ project.update!(shared_runners_enabled: false)
+
+ post :toggle_shared_runners, params: params
+
+ project.reload
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(project.shared_runners_enabled).to eq(false)
+ expect(flash[:alert]).to eq("Cannot enable shared runners because parent group does not allow it")
+ end
+ end
end
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 7f558ad9231..75135839a06 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -206,7 +206,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.5.0' do
before do
- prepare_knative_stubs(knative_05_service(knative_stub_options))
+ prepare_knative_stubs(knative_05_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -214,7 +214,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.6.0' do
before do
- prepare_knative_stubs(knative_06_service(knative_stub_options))
+ prepare_knative_stubs(knative_06_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -222,7 +222,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.7.0' do
before do
- prepare_knative_stubs(knative_07_service(knative_stub_options))
+ prepare_knative_stubs(knative_07_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -230,7 +230,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.9.0' do
before do
- prepare_knative_stubs(knative_09_service(knative_stub_options))
+ prepare_knative_stubs(knative_09_service(**knative_stub_options))
end
include_examples 'GET #show with valid data'
@@ -275,7 +275,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.5.0' do
before do
- prepare_knative_stubs(knative_05_service(knative_stub_options))
+ prepare_knative_stubs(knative_05_service(**knative_stub_options))
end
include_examples 'GET #index with data'
@@ -283,7 +283,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.6.0' do
before do
- prepare_knative_stubs(knative_06_service(knative_stub_options))
+ prepare_knative_stubs(knative_06_service(**knative_stub_options))
end
include_examples 'GET #index with data'
@@ -291,7 +291,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.7.0' do
before do
- prepare_knative_stubs(knative_07_service(knative_stub_options))
+ prepare_knative_stubs(knative_07_service(**knative_stub_options))
end
include_examples 'GET #index with data'
@@ -299,7 +299,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
context 'on Knative 0.9.0' do
before do
- prepare_knative_stubs(knative_09_service(knative_stub_options))
+ prepare_knative_stubs(knative_09_service(**knative_stub_options))
end
include_examples 'GET #index with data'
diff --git a/spec/controllers/projects/settings/access_tokens_controller_spec.rb b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
index 4743ab2b7c1..f293027a69a 100644
--- a/spec/controllers/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
@@ -14,28 +14,21 @@ RSpec.describe Projects::Settings::AccessTokensController do
sign_in(user)
end
- shared_examples 'feature unavailability' do
- context 'when flag is disabled' do
- before do
- stub_feature_flags(resource_access_token: false)
- end
+ shared_examples 'feature unavailable' do
+ let_it_be(:project) { create(:project) }
- it { is_expected.to have_gitlab_http_status(:not_found) }
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ project.add_developer(user)
end
- context 'when environment is Gitlab.com' do
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
+ it { is_expected.to have_gitlab_http_status(:not_found) }
end
describe '#index' do
subject { get :index, params: { namespace_id: project.namespace, project_id: project } }
- it_behaves_like 'feature unavailability'
+ it_behaves_like 'feature unavailable'
context 'when feature is available' do
let_it_be(:bot_user) { create(:user, :project_bot) }
@@ -84,7 +77,7 @@ RSpec.describe Projects::Settings::AccessTokensController do
let_it_be(:access_token_params) { {} }
- it_behaves_like 'feature unavailability'
+ it_behaves_like 'feature unavailable'
context 'when feature is available' do
let_it_be(:access_token_params) { { name: 'Nerd bot', scopes: ["api"], expires_at: 1.month.since.to_date } }
@@ -148,7 +141,7 @@ RSpec.describe Projects::Settings::AccessTokensController do
project.add_maintainer(bot_user)
end
- it_behaves_like 'feature unavailability'
+ it_behaves_like 'feature unavailable'
context 'when feature is available' do
before do
@@ -185,6 +178,5 @@ RSpec.describe Projects::Settings::AccessTokensController do
def enable_feature
allow(Gitlab).to receive(:com?).and_return(false)
- stub_feature_flags(resource_access_token: true)
end
end
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 8498ff49826..9e26eca88f2 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -266,4 +266,21 @@ RSpec.describe Projects::Settings::CiCdController do
end
end
end
+
+ describe 'GET #runner_setup_scripts' do
+ it 'renders the setup scripts' do
+ get :runner_setup_scripts, params: { os: 'linux', arch: 'amd64', namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("install")
+ expect(json_response).to have_key("register")
+ end
+
+ it 'renders errors if they occur' do
+ get :runner_setup_scripts, params: { os: 'foo', arch: 'bar', namespace_id: project.namespace, project_id: project }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to have_key("errors")
+ end
+ end
end
diff --git a/spec/controllers/projects/static_site_editor_controller_spec.rb b/spec/controllers/projects/static_site_editor_controller_spec.rb
index 7883c7e6f81..1cd32177e17 100644
--- a/spec/controllers/projects/static_site_editor_controller_spec.rb
+++ b/spec/controllers/projects/static_site_editor_controller_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Projects::StaticSiteEditorController do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user) }
- let(:data) { instance_double(Hash) }
+ let(:data) { { key: 'value' } }
describe 'GET show' do
+ render_views
+
let(:default_params) do
{
namespace_id: project.namespace,
@@ -50,41 +52,78 @@ RSpec.describe Projects::StaticSiteEditorController do
end
end
- %w[developer maintainer].each do |role|
- context "as #{role}" do
- before_all do
- project.add_role(user, role)
+ context "as developer" do
+ before do
+ project.add_role(user, 'developer')
+ sign_in(user)
+ get :show, params: default_params
+ end
+
+ it 'renders the edit page' do
+ expect(response).to render_template(:show)
+ end
+
+ it 'assigns ref and path variables' do
+ expect(assigns(:ref)).to eq('master')
+ expect(assigns(:path)).to eq('README.md')
+ end
+
+ context 'when combination of ref and path is incorrect' do
+ let(:default_params) { super().merge(id: 'unknown') }
+
+ it 'responds with 404 page' do
+ expect(response).to have_gitlab_http_status(:not_found)
end
+ end
+
+ context 'when invalid config file' do
+ let(:service_response) { ServiceResponse.error(message: 'invalid') }
- before do
- sign_in(user)
- get :show, params: default_params
+ it 'redirects to project page and flashes error message' do
+ expect(response).to redirect_to(project_path(project))
+ expect(response).to set_flash[:alert].to('invalid')
end
+ end
- it 'renders the edit page' do
- expect(response).to render_template(:show)
+ context 'with a service response payload containing multiple data types' do
+ let(:data) do
+ {
+ a_string: 'string',
+ an_array: [
+ {
+ foo: 'bar'
+ }
+ ],
+ an_integer: 123,
+ a_hash: {
+ a_deeper_hash: {
+ foo: 'bar'
+ }
+ },
+ a_boolean: true
+ }
end
- it 'assigns a required variables' do
- expect(assigns(:data)).to eq(data)
- expect(assigns(:ref)).to eq('master')
- expect(assigns(:path)).to eq('README.md')
+ let(:assigns_data) { assigns(:data) }
+
+ it 'leaves data values which are strings as strings' do
+ expect(assigns_data[:a_string]).to eq('string')
end
- context 'when combination of ref and path is incorrect' do
- let(:default_params) { super().merge(id: 'unknown') }
+ it 'leaves data values which are integers as integers' do
+ expect(assigns_data[:an_integer]).to eq(123)
+ end
- it 'responds with 404 page' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
+ it 'serializes data values which are booleans to JSON' do
+ expect(assigns_data[:a_boolean]).to eq('true')
end
- context 'when invalid config file' do
- let(:service_response) { ServiceResponse.error(message: 'invalid') }
+ it 'serializes data values which are arrays to JSON' do
+ expect(assigns_data[:an_array]).to eq('[{"foo":"bar"}]')
+ end
- it 'returns 422' do
- expect(response).to have_gitlab_http_status(:unprocessable_entity)
- end
+ it 'serializes data values which are hashes to JSON' do
+ expect(assigns_data[:a_hash]).to eq('{"a_deeper_hash":{"foo":"bar"}}')
end
end
end
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index d213d003bed..57760088183 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -131,4 +131,25 @@ RSpec.describe Projects::TagsController do
end
end
end
+
+ describe 'DELETE #destroy' do
+ let(:tag) { project.repository.add_tag(user, 'fake-tag', 'master') }
+ let(:request) do
+ delete(:destroy, params: { id: tag.name, namespace_id: project.namespace.to_param, project_id: project })
+ end
+
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'deletes tag' do
+ request
+
+ expect(response).to be_successful
+ expect(response.body).to include("Tag was removed")
+
+ expect(project.repository.find_tag(tag.name)).not_to be_present
+ end
+ end
end
diff --git a/spec/controllers/runner_setup_controller_spec.rb b/spec/controllers/runner_setup_controller_spec.rb
new file mode 100644
index 00000000000..0b237500907
--- /dev/null
+++ b/spec/controllers/runner_setup_controller_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RunnerSetupController do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET #platforms' do
+ it 'renders the platforms' do
+ get :platforms
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key("windows")
+ expect(json_response).to have_key("kubernetes")
+ end
+ end
+end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 688539f2a03..47d234df22b 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -78,6 +78,9 @@ RSpec.describe SessionsController do
end
context 'when using standard authentications' do
+ let(:user) { create(:user) }
+ let(:post_action) { post(:create, params: { user: { login: user.username, password: user.password } }) }
+
context 'invalid password' do
it 'does not authenticate user' do
post(:create, params: { user: { login: 'invalid', password: 'invalid' } })
@@ -87,6 +90,26 @@ RSpec.describe SessionsController do
end
end
+ context 'a blocked user' do
+ it 'does not authenticate the user' do
+ user.block!
+ post_action
+
+ expect(@request.env['warden']).not_to be_authenticated
+ expect(flash[:alert]).to include('Your account has been blocked')
+ end
+ end
+
+ context 'an internal user' do
+ it 'does not authenticate the user' do
+ user.ghost!
+ post_action
+
+ expect(@request.env['warden']).not_to be_authenticated
+ expect(flash[:alert]).to include('Your account does not have the required permission to login')
+ end
+ end
+
context 'when using valid password', :clean_gitlab_redis_shared_state do
let(:user) { create(:user) }
let(:user_params) { { login: user.username, password: user.password } }
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index d931947fff1..d0546657ccf 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -100,7 +100,7 @@ FactoryBot.define do
end
trait :prometheus do
- monitoring_tool { Gitlab::AlertManagement::AlertParams::MONITORING_TOOLS[:prometheus] }
+ monitoring_tool { Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus] }
payload do
{
annotations: {
@@ -123,5 +123,17 @@ FactoryBot.define do
with_description
low
end
+
+ trait :from_payload do
+ after(:build) do |alert|
+ alert_params = ::Gitlab::AlertManagement::Payload.parse(
+ alert.project,
+ alert.payload,
+ monitoring_tool: alert.monitoring_tool
+ ).alert_params
+
+ alert.assign_attributes(alert_params)
+ end
+ end
end
end
diff --git a/spec/factories/alert_management/http_integrations.rb b/spec/factories/alert_management/http_integrations.rb
new file mode 100644
index 00000000000..9311cb3e114
--- /dev/null
+++ b/spec/factories/alert_management/http_integrations.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :alert_management_http_integration, class: 'AlertManagement::HttpIntegration' do
+ project
+ active { true }
+ name { 'DataDog' }
+ endpoint_identifier { SecureRandom.hex(4) }
+
+ trait :inactive do
+ active { false }
+ end
+ end
+end
diff --git a/spec/factories/alerting/alert.rb b/spec/factories/alerting/alert.rb
deleted file mode 100644
index 285bb14efa2..00000000000
--- a/spec/factories/alerting/alert.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-FactoryBot.define do
- factory :alerting_alert, class: 'Gitlab::Alerting::Alert' do
- project
- payload { {} }
-
- transient do
- metric_id { nil }
-
- after(:build) do |alert, evaluator|
- unless alert.payload.key?('startsAt')
- alert.payload['startsAt'] = Time.now.rfc3339
- end
-
- if metric_id = evaluator.metric_id
- alert.payload['labels'] ||= {}
- alert.payload['labels']['gitlab_alert_id'] = metric_id.to_s
- end
- end
- end
-
- skip_create
- end
-end
diff --git a/spec/factories/authentication_event.rb b/spec/factories/authentication_event.rb
new file mode 100644
index 00000000000..ff539c6f5c4
--- /dev/null
+++ b/spec/factories/authentication_event.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :authentication_event do
+ user
+ provider { :standard }
+ user_name { 'Jane Doe' }
+ ip_address { '127.0.0.1' }
+ result { :failed }
+ end
+end
diff --git a/spec/factories/ci/build_pending_states.rb b/spec/factories/ci/build_pending_states.rb
index 765b7f005b9..eddd74b1068 100644
--- a/spec/factories/ci/build_pending_states.rb
+++ b/spec/factories/ci/build_pending_states.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
factory :ci_build_pending_state, class: 'Ci::BuildPendingState' do
build factory: :ci_build
- trace_checksum { 'crc32:12345678' }
+ trace_checksum { 'crc32:bc614e' }
state { 'success' }
end
end
diff --git a/spec/factories/ci/build_trace_chunks.rb b/spec/factories/ci/build_trace_chunks.rb
index 7c348f4b7e4..d996b41b648 100644
--- a/spec/factories/ci/build_trace_chunks.rb
+++ b/spec/factories/ci/build_trace_chunks.rb
@@ -53,5 +53,18 @@ FactoryBot.define do
trait :fog_without_data do
data_store { :fog }
end
+
+ trait :persisted do
+ data_store { :database }
+
+ transient do
+ initial_data { 'test data' }
+ end
+
+ after(:build) do |chunk, evaluator|
+ Ci::BuildTraceChunks::Database.new.set_data(chunk, evaluator.initial_data)
+ chunk.checksum = chunk.class.crc32(evaluator.initial_data)
+ end
+ end
end
end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 6174bfbfbb7..5fec6dd0d78 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -23,7 +23,7 @@ FactoryBot.define do
factory :ci_pipeline do
transient { ci_ref_presence { true } }
- after(:build) do |pipeline, evaluator|
+ before(:create) do |pipeline, evaluator|
pipeline.ensure_ci_ref! if evaluator.ci_ref_presence && pipeline.ci_ref_id.nil?
end
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index 66c33c9ece0..38d0545483c 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -75,7 +75,7 @@ FactoryBot.define do
end
# Use this trait if you want versions in a particular history, but don't
- # want to pay for gitlay calls.
+ # want to pay for gitaly calls.
trait :with_versions do
transient do
deleted { false }
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index ecbda5fbfd3..6c9f1ba0137 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -18,6 +18,7 @@ FactoryBot.define do
trait(:destroyed) { action { :destroyed } }
trait(:expired) { action { :expired } }
trait(:archived) { action { :archived } }
+ trait(:approved) { action { :approved } }
factory :closed_issue_event do
action { :closed }
@@ -55,6 +56,16 @@ FactoryBot.define do
action { :created }
target { design }
end
+
+ factory :project_created_event do
+ project factory: :project
+ action { :created }
+ end
+
+ factory :project_imported_event do
+ project factory: [:project, :with_import_url]
+ action { :created }
+ end
end
factory :push_event, class: 'PushEvent' do
diff --git a/spec/factories/group_import_states.rb b/spec/factories/group_import_states.rb
index 0b491d444fa..47d4b480b12 100644
--- a/spec/factories/group_import_states.rb
+++ b/spec/factories/group_import_states.rb
@@ -3,6 +3,7 @@
FactoryBot.define do
factory :group_import_state, class: 'GroupImportState', traits: %i[created] do
association :group, factory: :group
+ association :user, factory: :user
trait :created do
status { 0 }
diff --git a/spec/factories/instance_statistics/measurement.rb b/spec/factories/instance_statistics/measurement.rb
index fb180c23214..f9398cd3061 100644
--- a/spec/factories/instance_statistics/measurement.rb
+++ b/spec/factories/instance_statistics/measurement.rb
@@ -13,5 +13,13 @@ FactoryBot.define do
trait :group_count do
identifier { :groups }
end
+
+ trait :pipelines_succeeded_count do
+ identifier { :pipelines_succeeded }
+ end
+
+ trait :pipelines_skipped_count do
+ identifier { :pipelines_skipped }
+ end
end
end
diff --git a/spec/factories/issue_email_participants.rb b/spec/factories/issue_email_participants.rb
new file mode 100644
index 00000000000..730e224b01e
--- /dev/null
+++ b/spec/factories/issue_email_participants.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :issue_email_participant do
+ issue
+ email { generate(:email) }
+ end
+end
diff --git a/spec/factories/merge_request_diffs.rb b/spec/factories/merge_request_diffs.rb
index fdb7f52f3bd..481cabdae6d 100644
--- a/spec/factories/merge_request_diffs.rb
+++ b/spec/factories/merge_request_diffs.rb
@@ -2,13 +2,7 @@
FactoryBot.define do
factory :merge_request_diff do
- merge_request do
- build(:merge_request) do |merge_request|
- # MergeRequest should not create a MergeRequestDiff in the callback
- allow(merge_request).to receive(:ensure_merge_request_diff)
- end
- end
-
+ association :merge_request, factory: :merge_request_without_merge_request_diff
state { :collected }
commits_count { 1 }
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 6836d5d71f0..67f547dfbbb 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -286,5 +286,7 @@ FactoryBot.define do
merge_request.update!(labels: evaluator.labels)
end
end
+
+ factory :merge_request_without_merge_request_diff, class: 'MergeRequestWithoutMergeRequestDiff'
end
end
diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb
index 0dcec086da9..0ec977b8234 100644
--- a/spec/factories/namespaces.rb
+++ b/spec/factories/namespaces.rb
@@ -63,5 +63,13 @@ FactoryBot.define do
)
end
end
+
+ trait :shared_runners_disabled do
+ shared_runners_enabled { false }
+ end
+
+ trait :allow_descendants_override_disabled_shared_runners do
+ allow_descendants_override_disabled_shared_runners { true }
+ end
end
end
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 52b2a32cd3b..3562f57654e 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -91,6 +91,12 @@ FactoryBot.define do
end
end
+ factory :golang_package do
+ sequence(:name) { |n| "golang.org/x/pkg-#{n}"}
+ sequence(:version) { |n| "v1.0.#{n}" }
+ package_type { :golang }
+ end
+
factory :conan_package do
conan_metadatum
@@ -141,160 +147,6 @@ FactoryBot.define do
package
end
- factory :package_file, class: 'Packages::PackageFile' do
- package
-
- file_name { 'somefile.txt' }
-
- transient do
- file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
- end
-
- after(:build) do |package_file, evaluator|
- package_file.file = fixture_file_upload(evaluator.file_fixture)
- end
-
- factory :conan_package_file do
- package { create(:conan_package, without_package_files: true) }
-
- transient do
- without_loaded_metadatum { false }
- end
-
- trait(:conan_recipe_file) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :recipe_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
- file_name { 'conanfile.py' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_recipe_manifest) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :recipe_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanmanifest.txt' }
- file_name { 'conanmanifest.txt' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_package_manifest) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :package_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/package_files/conanmanifest.txt' }
- file_name { 'conanmanifest.txt' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_package_info) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :package_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/package_files/conaninfo.txt' }
- file_name { 'conaninfo.txt' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
-
- trait(:conan_package) do
- after :create do |package_file, evaluator|
- unless evaluator.without_loaded_metadatum
- create :conan_file_metadatum, :package_file, package_file: package_file
- end
- end
-
- file_fixture { 'spec/fixtures/packages/conan/package_files/conan_package.tgz' }
- file_name { 'conan_package.tgz' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- file_md5 { '12345abcde' }
- size { 400.kilobytes }
- end
- end
-
- trait(:jar) do
- file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
- file_name { 'my-app-1.0-20180724.124855-1.jar' }
- file_sha1 { '4f0bfa298744d505383fbb57c554d4f5c12d88b3' }
- size { 100.kilobytes }
- end
-
- trait(:pom) do
- file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom' }
- file_name { 'my-app-1.0-20180724.124855-1.pom' }
- file_sha1 { '19c975abd49e5102ca6c74a619f21e0cf0351c57' }
- size { 200.kilobytes }
- end
-
- trait(:xml) do
- file_fixture { 'spec/fixtures/packages/maven/maven-metadata.xml' }
- file_name { 'maven-metadata.xml' }
- file_sha1 { '42b1bdc80de64953b6876f5a8c644f20204011b0' }
- size { 300.kilobytes }
- end
-
- trait(:npm) do
- file_fixture { 'spec/fixtures/packages/npm/foo-1.0.1.tgz' }
- file_name { 'foo-1.0.1.tgz' }
- file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
- verified_at { Date.current }
- verification_checksum { '4437b5775e61455588a7e5187a2e5c58c680694260bbe5501c235ec690d17f83' }
- size { 400.kilobytes }
- end
-
- trait(:nuget) do
- package
- file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
- file_name { 'package.nupkg' }
- file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
- size { 300.kilobytes }
- end
-
- trait(:pypi) do
- package
- file_fixture { 'spec/fixtures/packages/pypi/sample-project.tar.gz' }
- file_name { 'sample-project-1.0.0.tar.gz' }
- file_sha1 { '2c0cfbed075d3fae226f051f0cc771b533e01aff' }
- file_md5 { '0a7392d24f42f83068fa3767c5310052' }
- file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
- size { 1149.bytes }
- end
-
- trait(:object_storage) do
- file_store { Packages::PackageFileUploader::Store::REMOTE }
- end
-
- trait(:checksummed) do
- verification_checksum { 'abc' }
- end
-
- trait(:checksum_failure) do
- verification_failure { 'Could not calculate the checksum' }
- end
-
- factory :package_file_with_file, traits: [:jar]
- end
-
factory :maven_metadatum, class: 'Packages::Maven::Metadatum' do
association :package, package_type: :maven
path { 'my/company/app/my-app/1.0-SNAPSHOT' }
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_file.rb
new file mode 100644
index 00000000000..bcca48fb086
--- /dev/null
+++ b/spec/factories/packages/package_file.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :package_file, class: 'Packages::PackageFile' do
+ package
+
+ file_name { 'somefile.txt' }
+
+ transient do
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
+ end
+
+ after(:build) do |package_file, evaluator|
+ package_file.file = fixture_file_upload(evaluator.file_fixture)
+ end
+
+ factory :conan_package_file do
+ package { create(:conan_package, without_package_files: true) }
+
+ transient do
+ without_loaded_metadatum { false }
+ end
+
+ trait(:conan_recipe_file) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :recipe_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanfile.py' }
+ file_name { 'conanfile.py' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_recipe_manifest) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :recipe_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/recipe_files/conanmanifest.txt' }
+ file_name { 'conanmanifest.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package_manifest) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conanmanifest.txt' }
+ file_name { 'conanmanifest.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package_info) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conaninfo.txt' }
+ file_name { 'conaninfo.txt' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+
+ trait(:conan_package) do
+ after :create do |package_file, evaluator|
+ unless evaluator.without_loaded_metadatum
+ create :conan_file_metadatum, :package_file, package_file: package_file
+ end
+ end
+
+ file_fixture { 'spec/fixtures/packages/conan/package_files/conan_package.tgz' }
+ file_name { 'conan_package.tgz' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ file_md5 { '12345abcde' }
+ size { 400.kilobytes }
+ end
+ end
+
+ trait(:jar) do
+ file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.jar' }
+ file_name { 'my-app-1.0-20180724.124855-1.jar' }
+ file_sha1 { '4f0bfa298744d505383fbb57c554d4f5c12d88b3' }
+ size { 100.kilobytes }
+ end
+
+ trait(:pom) do
+ file_fixture { 'spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom' }
+ file_name { 'my-app-1.0-20180724.124855-1.pom' }
+ file_sha1 { '19c975abd49e5102ca6c74a619f21e0cf0351c57' }
+ size { 200.kilobytes }
+ end
+
+ trait(:xml) do
+ file_fixture { 'spec/fixtures/packages/maven/maven-metadata.xml' }
+ file_name { 'maven-metadata.xml' }
+ file_sha1 { '42b1bdc80de64953b6876f5a8c644f20204011b0' }
+ size { 300.kilobytes }
+ end
+
+ trait(:npm) do
+ file_fixture { 'spec/fixtures/packages/npm/foo-1.0.1.tgz' }
+ file_name { 'foo-1.0.1.tgz' }
+ file_sha1 { 'be93151dc23ac34a82752444556fe79b32c7a1ad' }
+ verified_at { Date.current }
+ verification_checksum { '4437b5775e61455588a7e5187a2e5c58c680694260bbe5501c235ec690d17f83' }
+ size { 400.kilobytes }
+ end
+
+ trait(:nuget) do
+ package
+ file_fixture { 'spec/fixtures/packages/nuget/package.nupkg' }
+ file_name { 'package.nupkg' }
+ file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
+ size { 300.kilobytes }
+ end
+
+ trait(:pypi) do
+ package
+ file_fixture { 'spec/fixtures/packages/pypi/sample-project.tar.gz' }
+ file_name { 'sample-project-1.0.0.tar.gz' }
+ file_sha1 { '2c0cfbed075d3fae226f051f0cc771b533e01aff' }
+ file_md5 { '0a7392d24f42f83068fa3767c5310052' }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ size { 1149.bytes }
+ end
+
+ trait(:generic) do
+ package
+ file_fixture { 'spec/fixtures/packages/generic/myfile.tar.gz' }
+ file_name { "#{package.name}.tar.gz" }
+ file_sha256 { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ size { 1149.bytes }
+ end
+
+ trait(:object_storage) do
+ file_store { Packages::PackageFileUploader::Store::REMOTE }
+ end
+
+ trait(:checksummed) do
+ verification_checksum { 'abc' }
+ end
+
+ trait(:checksum_failure) do
+ verification_failure { 'Could not calculate the checksum' }
+ end
+
+ factory :package_file_with_file, traits: [:jar]
+ end
+end
diff --git a/spec/factories/pages_deployments.rb b/spec/factories/pages_deployments.rb
index 1bea003d683..20b8e4782df 100644
--- a/spec/factories/pages_deployments.rb
+++ b/spec/factories/pages_deployments.rb
@@ -4,9 +4,12 @@ FactoryBot.define do
factory :pages_deployment, class: 'PagesDeployment' do
project
file_store { ObjectStorage::SUPPORTED_STORES.first }
- size { 1.megabytes }
- # TODO: replace with proper file uploaded in https://gitlab.com/gitlab-org/gitlab/-/issues/245295
- file { "dummy string" }
+ after(:build) do |deployment, _evaluator|
+ deployment.file = fixture_file_upload(
+ Rails.root.join("spec/fixtures/pages.zip")
+ )
+ deployment.size = deployment.file.size
+ end
end
end
diff --git a/spec/factories/project_tracing_settings.rb b/spec/factories/project_tracing_settings.rb
new file mode 100644
index 00000000000..05c1529c18e
--- /dev/null
+++ b/spec/factories/project_tracing_settings.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_tracing_setting do
+ project
+ external_url { 'https://example.com' }
+ end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index 147413557d6..091c9d5a245 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -285,6 +285,12 @@ FactoryBot.define do
end
end
+ trait :with_import_url do
+ import_finished
+
+ import_url { generate(:url) }
+ end
+
trait(:wiki_enabled) { wiki_access_level { ProjectFeature::ENABLED } }
trait(:wiki_disabled) { wiki_access_level { ProjectFeature::DISABLED } }
trait(:wiki_private) { wiki_access_level { ProjectFeature::PRIVATE } }
diff --git a/spec/factories/prometheus_metrics.rb b/spec/factories/prometheus_metrics.rb
index 83e3845f1c3..503d392a524 100644
--- a/spec/factories/prometheus_metrics.rb
+++ b/spec/factories/prometheus_metrics.rb
@@ -9,6 +9,7 @@ FactoryBot.define do
group { :business }
project
legend { 'legend' }
+ dashboard_path { '.gitlab/dashboards/dashboard_path.yml' }
trait :common do
common { true }
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 9056fd97f13..13997080817 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -81,7 +81,7 @@ FactoryBot.define do
project_key { nil }
end
- after(:build) do |service, evaluator|
+ before(:create) do |service, evaluator|
if evaluator.create_data
create(:jira_tracker_data, service: service,
url: evaluator.url, api_url: evaluator.api_url, jira_issue_transition_id: evaluator.jira_issue_transition_id,
@@ -130,7 +130,7 @@ FactoryBot.define do
new_issue_url { 'http://new-issue.example.com' }
end
- after(:build) do |service, evaluator|
+ before(:create) do |service, evaluator|
if evaluator.create_data
create(:issue_tracker_data, service: service,
project_url: evaluator.project_url, issues_url: evaluator.issues_url, new_issue_url: evaluator.new_issue_url
@@ -151,7 +151,7 @@ FactoryBot.define do
project_identifier_code { 'PRJ-1' }
end
- after(:build) do |service, evaluator|
+ before(:create) do |service, evaluator|
create(:open_project_tracker_data, service: service,
url: evaluator.url, api_url: evaluator.api_url, token: evaluator.token,
closed_status_id: evaluator.closed_status_id, project_identifier_code: evaluator.project_identifier_code
diff --git a/spec/factories/terraform/state.rb b/spec/factories/terraform/state.rb
index 9decc89ef39..d80c1315e28 100644
--- a/spec/factories/terraform/state.rb
+++ b/spec/factories/terraform/state.rb
@@ -17,16 +17,6 @@ FactoryBot.define do
locked_by_user { create(:user) }
end
- trait(:checksummed) do
- with_file
- verification_checksum { 'abc' }
- end
-
- trait(:checksum_failure) do
- with_file
- verification_failure { 'Could not calculate the checksum' }
- end
-
trait :with_version do
after(:create) do |state|
create(:terraform_state_version, :with_file, terraform_state: state)
diff --git a/spec/factories/terraform/state_version.rb b/spec/factories/terraform/state_version.rb
index d1bd78215e3..b45bd01fd3c 100644
--- a/spec/factories/terraform/state_version.rb
+++ b/spec/factories/terraform/state_version.rb
@@ -7,5 +7,13 @@ FactoryBot.define do
sequence(:version)
file { fixture_file_upload('spec/fixtures/terraform/terraform.tfstate', 'application/json') }
+
+ trait(:checksummed) do
+ verification_checksum { 'abc' }
+ end
+
+ trait(:checksum_failure) do
+ verification_failure { 'Could not calculate the checksum' }
+ end
end
end
diff --git a/spec/factories/todos.rb b/spec/factories/todos.rb
index 0b5d00cff67..97a1265c46a 100644
--- a/spec/factories/todos.rb
+++ b/spec/factories/todos.rb
@@ -12,6 +12,10 @@ FactoryBot.define do
action { Todo::ASSIGNED }
end
+ trait :review_requested do
+ action { Todo::REVIEW_REQUESTED }
+ end
+
trait :mentioned do
action { Todo::MENTIONED }
end
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 5b20205a235..2f7c6fde3e4 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -50,6 +50,9 @@ FactoryBot.define do
create(:protected_branch, project: projects[0])
create(:protected_branch, name: 'main', project: projects[0])
+ # Tracing
+ create(:project_tracing_setting, project: projects[0])
+
# Incident Labeled Issues
incident_label = create(:label, :incident, project: projects[0])
create(:labeled_issue, project: projects[0], labels: [incident_label])
@@ -97,16 +100,19 @@ FactoryBot.define do
create(:grafana_integration, project: projects[1], enabled: true)
create(:grafana_integration, project: projects[2], enabled: false)
- create(:package, project: projects[0])
- create(:package, project: projects[0])
- create(:package, project: projects[1])
+ create(:package, project: projects[0], created_at: 3.days.ago)
+ create(:package, project: projects[0], created_at: 3.days.ago)
+ create(:package, project: projects[1], created_at: 3.days.ago)
create(:package, created_at: 2.months.ago, project: projects[1])
+ # User Preferences
+ create(:user_preference, gitpod_enabled: true)
+
ProjectFeature.first.update_attribute('repository_access_level', 0)
# Create fresh & a month (28-days SMAU) old data
env = create(:environment, project: projects[3])
- [2, 29].each do |n|
+ [3, 31].each do |n|
deployment_options = { created_at: n.days.ago, project: env.project, environment: env }
create(:deployment, :failed, deployment_options)
create(:deployment, :success, deployment_options)
diff --git a/spec/factories/wiki_pages.rb b/spec/factories/wiki_pages.rb
index cc866d336a4..3397277839e 100644
--- a/spec/factories/wiki_pages.rb
+++ b/spec/factories/wiki_pages.rb
@@ -9,7 +9,7 @@ FactoryBot.define do
content { 'Content for wiki page' }
format { :markdown }
message { nil }
- project { association(:project, :wiki_repo) }
+ project { association(:project) }
container { project }
wiki { association(:wiki, container: container) }
page { OpenStruct.new(url_path: title) }
@@ -18,6 +18,7 @@ FactoryBot.define do
initialize_with do
new(wiki, page).tap do |page|
page.attributes = {
+ slug: title&.tr(' ', '-'),
title: title,
content: content,
format: format
diff --git a/spec/factories/wikis.rb b/spec/factories/wikis.rb
index 96578fdcee6..86d98bfd756 100644
--- a/spec/factories/wikis.rb
+++ b/spec/factories/wikis.rb
@@ -3,8 +3,8 @@
FactoryBot.define do
factory :wiki do
transient do
- container { association(:project, :wiki_repo) }
- user { association(:user) }
+ container { association(:project) }
+ user { container.default_owner || association(:user) }
end
initialize_with { Wiki.for_container(container, user) }
@@ -12,7 +12,7 @@ FactoryBot.define do
factory :project_wiki do
transient do
- project { association(:project, :wiki_repo) }
+ project { association(:project) }
end
container { project }
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 38f0b813183..e1df1c69351 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -130,6 +130,38 @@ RSpec.describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_n
expect(user_internal_regex['placeholder']).to eq 'Regex pattern'
end
+ context 'Change Sign-up restrictions' do
+ context 'Require Admin approval for new signup setting' do
+ context 'when feature is enabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: true)
+ end
+
+ it 'changes the setting' do
+ page.within('.as-signup') do
+ check 'Require admin approval for new sign-ups'
+ click_button 'Save changes'
+ end
+
+ expect(current_settings.require_admin_approval_after_user_signup).to be_truthy
+ expect(page).to have_content "Application settings saved successfully"
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(admin_approval_for_new_user_signups: false)
+ end
+
+ it 'does not show the setting' do
+ page.within('.as-signup') do
+ expect(page).not_to have_selector('.application_setting_require_admin_approval_after_user_signup')
+ end
+ end
+ end
+ end
+ end
+
it 'Change Sign-in restrictions' do
page.within('.as-signin') do
fill_in 'Home page URL', with: 'https://about.gitlab.com/'
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index a37210d2acc..8b7750ab4ec 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe "Admin::Users" do
expect(page).to have_content(current_user.last_activity_on.strftime("%e %b, %Y"))
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
+ expect(page).to have_content('Projects')
expect(page).to have_button('Block')
expect(page).to have_button('Deactivate')
expect(page).to have_button('Delete user')
@@ -48,6 +49,19 @@ RSpec.describe "Admin::Users" do
end
end
+ context 'user project count' do
+ before do
+ project = create(:project)
+ project.add_maintainer(current_user)
+ end
+
+ it "displays the count of the user's projects" do
+ visit admin_users_path
+
+ expect(page.find("[data-testid='user-project-count-#{current_user.id}']").text).to eq("1")
+ end
+ end
+
describe 'search and sort' do
before do
create(:user, name: 'Foo Bar', last_activity_on: 3.days.ago)
@@ -606,7 +620,7 @@ RSpec.describe "Admin::Users" do
end
end
- describe 'show user keys' do
+ describe 'show user keys', :js do
let!(:key1) do
create(:key, user: user, title: "ssh-rsa Key1", key: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4FIEBXGi4bPU8kzxMefudPIJ08/gNprdNTaO9BR/ndy3+58s2HCTw2xCHcsuBmq+TsAqgEidVq4skpqoTMB+Uot5Uzp9z4764rc48dZiI661izoREoKnuRQSsRqUTHg5wrLzwxlQbl1MVfRWQpqiz/5KjBC7yLEb9AbusjnWBk8wvC1bQPQ1uLAauEA7d836tgaIsym9BrLsMVnR4P1boWD3Xp1B1T/ImJwAGHvRmP/ycIqmKdSpMdJXwxcb40efWVj0Ibbe7ii9eeoLdHACqevUZi6fwfbymdow+FeqlkPoHyGg3Cu4vD/D8+8cRc7mE/zGCWcQ15Var83Tczour Key1")
end
@@ -629,7 +643,11 @@ RSpec.describe "Admin::Users" do
expect(page).to have_content(key2.title)
expect(page).to have_content(key2.key)
- click_link 'Remove'
+ click_button 'Delete'
+
+ page.within('.modal') do
+ page.click_button('Delete')
+ end
expect(page).not_to have_content(key2.title)
end
diff --git a/spec/features/admin/admin_uses_repository_checks_spec.rb b/spec/features/admin/admin_uses_repository_checks_spec.rb
index b8851c28531..44642983a36 100644
--- a/spec/features/admin/admin_uses_repository_checks_spec.rb
+++ b/spec/features/admin/admin_uses_repository_checks_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe 'Admin uses repository checks', :request_store, :clean_gitlab_red
)
visit_admin_project_page(project)
- page.within('.alert') do
+ page.within('.gl-alert') do
expect(page.text).to match(/Last repository check \(just now\) failed/)
end
end
diff --git a/spec/features/admin/clusters/eks_spec.rb b/spec/features/admin/clusters/eks_spec.rb
index ef49aebc7c5..ad7122bf182 100644
--- a/spec/features/admin/clusters/eks_spec.rb
+++ b/spec/features/admin/clusters/eks_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Instance-level AWS EKS Cluster', :js do
before do
visit admin_clusters_path
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
end
context 'when user creates a cluster on AWS EKS' do
diff --git a/spec/features/admin/dashboard_spec.rb b/spec/features/admin/dashboard_spec.rb
index 4ffa5e3be0b..acb8fb54e11 100644
--- a/spec/features/admin/dashboard_spec.rb
+++ b/spec/features/admin/dashboard_spec.rb
@@ -28,11 +28,9 @@ RSpec.describe 'admin visits dashboard' do
describe 'Users statistic' do
let_it_be(:users_statistics) { create(:users_statistics) }
+ let_it_be(:users_count_label) { Gitlab.ee? ? 'Billable users 71' : 'Active users 71' }
it 'shows correct amounts of users', :aggregate_failures do
- expected_active_users_text = Gitlab.ee? ? 'Active users (Billable users) 71' : 'Active users 71'
-
- sign_in(create(:admin))
visit admin_dashboard_stats_path
expect(page).to have_content('Users without a Group and Project 23')
@@ -42,9 +40,9 @@ RSpec.describe 'admin visits dashboard' do
expect(page).to have_content('Users with highest role Maintainer 6')
expect(page).to have_content('Users with highest role Owner 5')
expect(page).to have_content('Bots 2')
- expect(page).to have_content(expected_active_users_text)
expect(page).to have_content('Blocked users 7')
expect(page).to have_content('Total users 78')
+ expect(page).to have_content(users_count_label)
end
end
end
diff --git a/spec/features/boards/add_issues_modal_spec.rb b/spec/features/boards/add_issues_modal_spec.rb
index d432825e113..d4a26ba0098 100644
--- a/spec/features/boards/add_issues_modal_spec.rb
+++ b/spec/features/boards/add_issues_modal_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe 'Issue Boards add issue modal', :js do
it 'loads issues' do
page.within('.add-issues-modal') do
- page.within('.nav-links') do
+ page.within('.gl-tabs') do
expect(page).to have_content('2')
end
@@ -146,7 +146,7 @@ RSpec.describe 'Issue Boards add issue modal', :js do
page.within('.add-issues-modal') do
first('.board-card .board-card-number').click
- page.within('.nav-links') do
+ page.within('.gl-tabs') do
expect(page).to have_content('Selected issues 1')
end
end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index e36378bd34e..eb8c1826dfd 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -24,33 +24,11 @@ RSpec.describe 'Issue Boards', :js do
context 'no lists' do
before do
visit project_board_path(project, board)
- wait_for_requests
- expect(page).to have_selector('.board', count: 3)
- end
-
- it 'shows blank state' do
- expect(page).to have_content('Welcome to your Issue Board!')
- end
-
- it 'shows tooltip on add issues button' do
- button = page.find('.filter-dropdown-container button', text: 'Add issues')
-
- expect(button[:"data-original-title"]).to eq("Please add a list to your board first")
- end
-
- it 'hides the blank state when clicking nevermind button' do
- page.within(find('.board-blank-state')) do
- click_button("Nevermind, I'll use my own")
- end
- expect(page).to have_selector('.board', count: 2)
end
it 'creates default lists' do
lists = ['Open', 'To Do', 'Doing', 'Closed']
- page.within(find('.board-blank-state')) do
- click_button('Add default lists')
- end
wait_for_requests
expect(page).to have_selector('.board', count: 4)
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 346f305f0d0..5f58fa420fb 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -180,7 +180,7 @@ RSpec.describe 'Contributions Calendar', :js do
before do
push_code_contribution
- Timecop.freeze(Date.yesterday) do
+ travel_to(Date.yesterday) do
Issues::CreateService.new(contributed_project, user, issue_params).execute
end
end
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 4f7f62d00a5..31d6bcda9e8 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -87,6 +87,7 @@ RSpec.describe 'Clusterable > Show page' do
within('#advanced-settings-section') do
expect(page).to have_content('Google Kubernetes Engine')
expect(page).to have_content('Manage your Kubernetes cluster by visiting')
+ expect_common_advanced_options
end
end
end
@@ -117,6 +118,7 @@ RSpec.describe 'Clusterable > Show page' do
within('#advanced-settings-section') do
expect(page).not_to have_content('Google Kubernetes Engine')
expect(page).not_to have_content('Manage your Kubernetes cluster by visiting')
+ expect_common_advanced_options
end
end
end
@@ -176,4 +178,14 @@ RSpec.describe 'Clusterable > Show page' do
let(:cluster) { create(:cluster, :provided_by_user, :instance) }
end
end
+
+ private
+
+ def expect_common_advanced_options
+ aggregate_failures do
+ expect(page).to have_content('Cluster management project')
+ expect(page).to have_content('Clear cluster cache')
+ expect(page).to have_content('Remove Kubernetes cluster integration')
+ end
+ end
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index e66a40720da..97ee891dbb8 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -140,6 +140,7 @@ RSpec.describe 'Commits' do
context 'when accessing internal project with disallowed access', :js do
before do
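+ # Keep the GraphQL pipeline header flag off so the legacy header markup is rendered for these assertions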
+ stub_feature_flags(graphql_pipeline_header: false)
project.update(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
diff --git a/spec/features/dashboard/todos/todos_filtering_spec.rb b/spec/features/dashboard/todos/todos_filtering_spec.rb
index f60b07c976e..b1464af4194 100644
--- a/spec/features/dashboard/todos/todos_filtering_spec.rb
+++ b/spec/features/dashboard/todos/todos_filtering_spec.rb
@@ -130,6 +130,7 @@ RSpec.describe 'Dashboard > User filters todos', :js do
before do
create(:todo, :build_failed, user: user_1, author: user_2, project: project_1)
create(:todo, :marked, user: user_1, author: user_2, project: project_1, target: issue1)
+ create(:todo, :review_requested, user: user_1, author: user_2, project: project_1, target: issue1)
end
it 'filters by Assigned' do
@@ -138,6 +139,12 @@ RSpec.describe 'Dashboard > User filters todos', :js do
expect_to_see_action(:assigned)
end
+ it 'filters by Review Requested' do
+ filter_action('Review requested')
+
+ expect_to_see_action(:review_requested)
+ end
+
it 'filters by Mentioned' do
filter_action('Mentioned')
@@ -168,6 +175,7 @@ RSpec.describe 'Dashboard > User filters todos', :js do
def expect_to_see_action(action_name)
action_names = {
assigned: ' assigned you ',
+ review_requested: ' requested a review of ',
mentioned: ' mentioned ',
marked: ' added a todo for ',
build_failed: ' build failed for '
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index cf773d2caed..7243b5d3483 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -197,6 +197,21 @@ RSpec.describe 'Dashboard Todos' do
end
end
end
+
+ context 'review request todo' do
+ let(:merge_request) { create(:merge_request, title: "Fixes issue") }
+
+ before do
+ create(:todo, :review_requested, user: user, project: project, target: merge_request, author: user)
+ visit dashboard_todos_path
+ end
+
+ it 'shows a message that you requested a review from yourself' do
+ page.within('.js-todos-all') do
+ expect(page).to have_content("You requested a review of merge request #{merge_request.to_reference} \"Fixes issue\" at #{project.namespace.owner_name} / #{project.name} from yourself")
+ end
+ end
+ end
end
context 'User has done todos', :js do
diff --git a/spec/features/expand_collapse_diffs_spec.rb b/spec/features/expand_collapse_diffs_spec.rb
index e705f2916da..49343cc7a57 100644
--- a/spec/features/expand_collapse_diffs_spec.rb
+++ b/spec/features/expand_collapse_diffs_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Expand and collapse diffs', :js do
let(:project) { create(:project, :repository) }
before do
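+ # Disable increased diff limits so the large fixture files are still collapsed by default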
+ stub_feature_flags(increased_diff_limits: false)
sign_in(create(:admin))
# Ensure that undiffable.md is in .gitattributes
diff --git a/spec/features/groups/clusters/eks_spec.rb b/spec/features/groups/clusters/eks_spec.rb
index 5a62741250a..c361c502cbb 100644
--- a/spec/features/groups/clusters/eks_spec.rb
+++ b/spec/features/groups/clusters/eks_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Group AWS EKS Cluster', :js do
before do
visit group_clusters_path(group)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
end
context 'when user creates a cluster on AWS EKS' do
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 90253451d6b..97f8864aab2 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User Cluster', :js do
before do
visit group_clusters_path(group)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Connect existing cluster'
end
@@ -66,6 +66,10 @@ RSpec.describe 'User Cluster', :js do
expect(page.find_field('cluster[platform_kubernetes_attributes][authorization_type]', disabled: true)).to be_checked
end
end
+
+ it 'user sees namespace per environment is enabled by default' do
+ expect(page).to have_checked_field('Namespace per environment')
+ end
end
context 'when user filled form with invalid parameters' do
@@ -125,7 +129,7 @@ RSpec.describe 'User Cluster', :js do
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
end
end
end
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index e3bbbd4d73b..8a7d997d5ac 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -6,62 +6,115 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
include Select2Helper
include Spec::Support::Helpers::Features::ListRowsHelpers
- let(:user) { create(:user) }
- let(:shared_with_group) { create(:group) }
- let(:shared_group) { create(:group) }
+ let_it_be(:user) { create(:user) }
before do
stub_feature_flags(vue_group_members_list: false)
- shared_group.add_owner(user)
sign_in(user)
end
- it 'add group to group' do
- visit group_group_members_path(shared_group)
+ context 'when group link does not exist' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_to_add) { create(:group) }
- add_group(shared_with_group.id, 'Reporter')
+ before do
+ group.add_owner(user)
+ visit group_group_members_path(group)
+ end
- click_groups_tab
+ it 'adds group to group' do
+ add_group(group_to_add.id, 'Reporter')
- page.within(first_row) do
- expect(page).to have_content(shared_with_group.name)
- expect(page).to have_content('Reporter')
+ click_groups_tab
+
+ page.within(first_row) do
+ expect(page).to have_content(group_to_add.name)
+ expect(page).to have_content('Reporter')
+ end
end
end
- it 'remove group from group' do
- create(:group_group_link, shared_group: shared_group,
- shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
+ context 'when group link exists' do
+ let_it_be(:shared_with_group) { create(:group) }
+ let_it_be(:shared_group) { create(:group) }
- visit group_group_members_path(shared_group)
+ let(:additional_link_attrs) { {} }
- click_groups_tab
+ let_it_be(:group_link, refind: true) do
+ create(
+ :group_group_link,
+ shared_group: shared_group,
+ shared_with_group: shared_with_group,
+ group_access: ::Gitlab::Access::DEVELOPER
+ )
+ end
- expect(page).to have_content(shared_with_group.name)
+ before do
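+ # Pin the current time to the start of the day so relative text like 'Expires in 3 days' is deterministic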
+ travel_to Time.now.utc.beginning_of_day
+ group_link.update!(additional_link_attrs)
- accept_confirm do
- find(:css, '#tab-groups li', text: shared_with_group.name).find(:css, 'a.btn-remove').click
+ shared_group.add_owner(user)
+ visit group_group_members_path(shared_group)
end
- expect(page).not_to have_content(shared_with_group.name)
- end
+ it 'removes group from group' do
+ click_groups_tab
+
+ expect(page).to have_content(shared_with_group.name)
+
+ accept_confirm do
+ find(:css, '#tab-groups li', text: shared_with_group.name).find(:css, 'a.btn-remove').click
+ end
+
+ expect(page).not_to have_content(shared_with_group.name)
+ end
- it 'update group to owner level' do
- create(:group_group_link, shared_group: shared_group,
- shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
+ it 'updates the group access level' do
+ click_groups_tab
- visit group_group_members_path(shared_group)
+ page.within(first_row) do
+ click_button('Developer')
+ click_link('Maintainer')
- click_groups_tab
+ wait_for_requests
- page.within(first_row) do
- click_button('Developer')
- click_link('Maintainer')
+ expect(page).to have_button('Maintainer')
+ end
+ end
+
+ it 'updates expiry date' do
+ click_groups_tab
+
+ expires_at_field = "member_expires_at_#{shared_with_group.id}"
+ fill_in "member_expires_at_#{shared_with_group.id}", with: 3.days.from_now.to_date
+ find_field(expires_at_field).native.send_keys :enter
wait_for_requests
- expect(page).to have_button('Maintainer')
+ page.within(find('li.group_member')) do
+ expect(page).to have_content('Expires in 3 days')
+ end
+ end
+
+ context 'when expiry date is set' do
+ let(:additional_link_attrs) { { expires_at: 3.days.from_now.to_date } }
+
+ it 'clears expiry date' do
+ click_groups_tab
+
+ page.within(find('li.group_member')) do
+ expect(page).to have_content('Expires in 3 days')
+
+ page.within(find('.js-edit-member-form')) do
+ find('.js-clear-input').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Expires in')
+ end
+ end
end
end
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 60f1c404e78..e81f2370d10 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -72,4 +72,12 @@ RSpec.describe 'Group navbar' do
it_behaves_like 'verified navigation bar'
end
+
+ context 'when invite team members is not available' do
+ it 'does not display the js-invite-members-trigger' do
+ visit group_path(group)
+
+ expect(page).not_to have_selector('.js-invite-members-trigger')
+ end
+ end
end
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
index d81e4aa70cf..60e0c08b3d4 100644
--- a/spec/features/groups/packages_spec.rb
+++ b/spec/features/groups/packages_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Group Packages' do
it 'allows you to navigate to the project page' do
page.within('[data-qa-selector="packages-table"]') do
- click_link project.name
+ find('[data-qa-selector="package-path"]', text: project.name).click
end
expect(page).to have_current_path(project_path(project))
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index ec30f34199d..304573ecd6e 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -184,4 +184,17 @@ RSpec.describe 'Group show page' do
expect(page).to have_selector('.notifications-btn.disabled', visible: true)
end
end
+
+ context 'page og:description' do
+ let(:group) { create(:group, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
+ let(:maintainer) { create(:user) }
+
+ before do
+ group.add_maintainer(maintainer)
+ sign_in(maintainer)
+ visit path
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
end
diff --git a/spec/features/incidents/incident_details_spec.rb b/spec/features/incidents/incident_details_spec.rb
new file mode 100644
index 00000000000..6db767dbddb
--- /dev/null
+++ b/spec/features/incidents/incident_details_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Incident details', :js do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project, author: developer) }
+
+ before_all do
+ project.add_developer(developer)
+ end
+
+ before do
+ sign_in(developer)
+
+ visit project_issues_incident_path(project, incident)
+ wait_for_requests
+ end
+
+ context 'when a developer+ displays the incident' do
+ it 'shows the incident' do
+ page.within('.issuable-details') do
+ expect(find('h2')).to have_content(incident.title)
+ end
+ end
+
+ it 'does not show design management' do
+ expect(page).not_to have_selector('.js-design-management')
+ end
+ end
+end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 3954de56eea..8a4c4cc1eca 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -81,10 +81,10 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
- context 'when inviting a user using their email address' do
+ context 'when inviting a user' do
let(:new_user) { build_stubbed(:user) }
let(:invite_email) { new_user.email }
- let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email) }
+ let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email, created_by: owner) }
let!(:project_invite) { create(:project_member, :invited, project: project, invite_email: invite_email) }
context 'when user has not signed in yet' do
@@ -210,30 +210,43 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
context 'when declining the invitation' do
let(:send_email_confirmation) { true }
- context 'when signed in' do
- before do
- sign_in(user)
- visit invite_path(group_invite.raw_invite_token)
+ context 'as an existing user' do
+ let(:group_invite) { create(:group_member, user: user, group: group, created_by: owner) }
+
+ context 'when signed in' do
+ before do
+ sign_in(user)
+ visit decline_invite_path(group_invite.raw_invite_token)
+ end
+
+ it 'declines application and redirects to dashboard' do
+ expect(current_path).to eq(dashboard_projects_path)
+ expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
end
- it 'declines application and redirects to dashboard' do
- page.click_link 'Decline'
+ context 'when signed out' do
+ before do
+ visit decline_invite_path(group_invite.raw_invite_token)
+ end
- expect(current_path).to eq(dashboard_projects_path)
- expect(page).to have_content('You have declined the invitation to join group Owned.')
- expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ it 'declines application and redirects to sign in page' do
+ expect(current_path).to eq(new_user_session_path)
+ expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
end
end
- context 'when signed out' do
+ context 'as a non-existing user' do
before do
visit decline_invite_path(group_invite.raw_invite_token)
end
- it 'declines application and redirects to sign in page' do
- expect(current_path).to eq(new_user_session_path)
-
- expect(page).to have_content('You have declined the invitation to join group Owned.')
+ it 'declines application and shows a decline page' do
+ expect(current_path).to eq(decline_invite_path(group_invite.raw_invite_token))
+ expect(page).to have_content('You successfully declined the invitation')
expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
diff --git a/spec/features/issuables/close_reopen_report_toggle_spec.rb b/spec/features/issuables/close_reopen_report_toggle_spec.rb
index 5ea89a7984f..6e99cfb3293 100644
--- a/spec/features/issuables/close_reopen_report_toggle_spec.rb
+++ b/spec/features/issuables/close_reopen_report_toggle_spec.rb
@@ -23,7 +23,15 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
expect(container).to have_content("Close #{human_model_name}")
expect(container).to have_content('Report abuse')
expect(container).to have_content("Report #{human_model_name.pluralize} that are abusive, inappropriate or spam.")
- expect(container).to have_selector('.close-item.droplab-item-selected')
+
+ if issuable.is_a?(MergeRequest)
+ page.within('.js-issuable-close-dropdown') do
+ expect(page).to have_link('Close merge request')
+ end
+ else
+ expect(container).to have_selector('.close-item.droplab-item-selected')
+ end
+
expect(container).to have_selector('.report-item')
expect(container).not_to have_selector('.report-item.droplab-item-selected')
expect(container).not_to have_selector('.reopen-item')
@@ -123,7 +131,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle' do
it 'shows only the `Edit` button' do
expect(page).to have_link('Edit')
- expect(page).not_to have_link('Report abuse')
+ expect(page).to have_link('Report abuse')
expect(page).not_to have_button('Close merge request')
expect(page).not_to have_button('Reopen merge request')
end
diff --git a/spec/features/issuables/markdown_references/internal_references_spec.rb b/spec/features/issuables/markdown_references/internal_references_spec.rb
index aceaea8d2ed..07d4271eed7 100644
--- a/spec/features/issuables/markdown_references/internal_references_spec.rb
+++ b/spec/features/issuables/markdown_references/internal_references_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe "Internal references", :js do
add_note("##{public_project_issue.to_reference(private_project)}")
end
- context "when user doesn't have access to private project" do
+ context "when user doesn't have access to private project", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
before do
sign_in(public_project_user)
@@ -52,7 +52,7 @@ RSpec.describe "Internal references", :js do
visit(project_issue_path(public_project, public_project_issue))
end
- it "doesn't show any references" do
+ it "doesn't show any references", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
page.within(".issue-details") do
expect(page).not_to have_content("#merge-requests .merge-requests-title")
end
@@ -94,7 +94,7 @@ RSpec.describe "Internal references", :js do
add_note("##{public_project_merge_request.to_reference(private_project)}")
end
- context "when user doesn't have access to private project" do
+ context "when user doesn't have access to private project", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
before do
sign_in(public_project_user)
@@ -121,7 +121,7 @@ RSpec.describe "Internal references", :js do
visit(project_merge_request_path(public_project, public_project_merge_request))
end
- it "doesn't show any references" do
+ it "doesn't show any references", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/257832' do
page.within(".merge-request-details") do
expect(page).not_to have_content("#merge-requests .merge-requests-title")
end
diff --git a/spec/features/issuables/merge_request_discussion_lock_spec.rb b/spec/features/issuables/merge_request_discussion_lock_spec.rb
new file mode 100644
index 00000000000..4e0265839f6
--- /dev/null
+++ b/spec/features/issuables/merge_request_discussion_lock_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+# TODO use shared examples to merge this spec with discussion_lock_spec.rb
+# https://gitlab.com/gitlab-org/gitlab/-/issues/255910
+
+require 'spec_helper'
+
+RSpec.describe 'Merge Request Discussion Lock', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project, author: user) }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the merge_request' do
+ visit project_merge_request_path(merge_request.project, merge_request)
+
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Lock')
+ end
+
+ expect(find('[data-testid="lock-status"]')).to have_content('Locked')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ merge_request.update_attribute(:discussion_locked, true)
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ it 'the user can unlock the merge_request' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Unlock')
+ end
+
+ expect(find('[data-testid="lock-status"]')).to have_content('Unlocked')
+ end
+ end
+ end
+
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ it 'the user can not lock the merge_request' do
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ merge_request.update_attribute(:discussion_locked, true)
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ it 'the user can not unlock the merge_request' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
+ end
+ end
+end
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 0165fba9ace..ff78b9e608f 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -6,7 +6,9 @@ RSpec.describe 'GFM autocomplete', :js do
let_it_be(:user_xss_title) { 'eve <img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;' }
let_it_be(:user_xss) { create(:user, name: user_xss_title, username: 'xss.user') }
let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group, name: 'Ancestor') }
+ let_it_be(:child_group) { create(:group, parent: group, name: 'My group') }
+ let_it_be(:project) { create(:project, group: child_group) }
let_it_be(:label) { create(:label, project: project, title: 'special+') }
let(:issue) { create(:issue, project: project) }
@@ -530,7 +532,7 @@ RSpec.describe 'GFM autocomplete', :js do
expect(page).to have_selector('.tribute-container', visible: true)
- expect(find('.tribute-container ul', visible: true).text).to have_content(user_xss.username)
+ expect(find('.tribute-container ul', visible: true)).to have_text(user_xss.username)
end
it 'selects the first item for assignee dropdowns' do
@@ -558,6 +560,24 @@ RSpec.describe 'GFM autocomplete', :js do
expect(find('.tribute-container ul', visible: true)).to have_content(user.name)
end
+ context 'when autocompleting for groups' do
+ it 'shows the group when searching for the name of the group' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys('@mygroup')
+ end
+
+ expect(find('.tribute-container ul', visible: true)).to have_text('My group')
+ end
+
+ it 'does not show the group when searching for the name of the parent of the group' do
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys('@ancestor')
+ end
+
+ expect(find('.tribute-container ul', visible: true)).not_to have_text('My group')
+ end
+ end
+
context 'if a selected value has special characters' do
it 'wraps the result in double quotes' do
note = find('#note-body')
diff --git a/spec/features/issues/user_sees_live_update_spec.rb b/spec/features/issues/user_sees_live_update_spec.rb
index c9b751715bc..d27cdb774a5 100644
--- a/spec/features/issues/user_sees_live_update_spec.rb
+++ b/spec/features/issues/user_sees_live_update_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'Issues > User sees live update', :js do
expect(page).to have_css('.sidebar-item-warning-message')
within('.sidebar-item-warning-message') do
- find('.btn-close').click
+ find('[data-testid="confidential-toggle"]').click
end
wait_for_requests
diff --git a/spec/features/issues/user_views_issue_spec.rb b/spec/features/issues/user_views_issue_spec.rb
index 3f18764aa58..9b1c8be1513 100644
--- a/spec/features/issues/user_views_issue_spec.rb
+++ b/spec/features/issues/user_views_issue_spec.rb
@@ -5,7 +5,7 @@ require "spec_helper"
RSpec.describe "User views issue" do
let_it_be(:project) { create(:project_empty_repo, :public) }
let_it_be(:user) { create(:user) }
- let_it_be(:issue) { create(:issue, project: project, description: "# Description header", author: user) }
+ let_it_be(:issue) { create(:issue, project: project, description: "# Description header\n\n**Lorem** _ipsum_ dolor sit [amet](https://example.com)", author: user) }
let_it_be(:note) { create(:note, noteable: issue, project: project, author: user) }
before_all do
@@ -20,6 +20,8 @@ RSpec.describe "User views issue" do
it { expect(page).to have_header_with_correct_id_and_link(1, "Description header", "description-header") }
+ it_behaves_like 'page meta description', ' Description header Lorem ipsum dolor sit amet'
+
it 'shows the merge request and issue actions', :aggregate_failures do
expect(page).to have_link('New issue')
expect(page).to have_button('Create merge request')
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index 40f6482c948..c8fc23bebf9 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -41,7 +41,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_comment
page.within('.review-bar-content') do
- click_button 'Finish review'
click_button 'Submit review'
end
@@ -64,18 +63,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
expect(page).to have_selector('.note:not(.draft-note)', text: 'Line is wrong')
end
- it 'discards review' do
- write_comment
-
- click_button 'Discard review'
-
- click_button 'Delete all pending comments'
-
- wait_for_requests
-
- expect(page).not_to have_selector('.draft-note-component')
- end
-
it 'deletes draft note' do
write_comment
@@ -149,7 +136,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_reply_to_discussion(resolve: true)
page.within('.review-bar-content') do
- click_button 'Finish review'
click_button 'Submit review'
end
@@ -192,7 +178,6 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_reply_to_discussion(button_text: 'Start a review', unresolve: true)
page.within('.review-bar-content') do
- click_button 'Finish review'
click_button 'Submit review'
end
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index 0e65cb358da..a98bfd1c8a4 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -26,7 +26,12 @@ RSpec.describe 'a maintainer edits files on a source-branch of an MR from a fork
visit project_merge_request_path(target_project, merge_request)
click_link 'Changes'
wait_for_requests
- first('.js-file-title').find('.js-edit-blob').click
+
+ page.within(first('.js-file-title')) do
+ find('.js-diff-more-actions').click
+ find('.js-edit-blob').click
+ end
+
wait_for_requests
end
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 3a199951b56..ad1ad067935 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -34,7 +34,8 @@ RSpec.describe 'User comments on a diff', :js do
page.within('.diff-files-holder > div:nth-child(3)') do
expect(page).to have_content('Line is wrong')
- find('.js-btn-vue-toggle-comments').click
+ find('.js-diff-more-actions').click
+ click_button 'Hide comments on this file'
expect(page).not_to have_content('Line is wrong')
end
@@ -67,7 +68,8 @@ RSpec.describe 'User comments on a diff', :js do
# Hide the comment.
page.within('.diff-files-holder > div:nth-child(3)') do
- find('.js-btn-vue-toggle-comments').click
+ find('.js-diff-more-actions').click
+ click_button 'Hide comments on this file'
expect(page).not_to have_content('Line is wrong')
end
@@ -80,7 +82,8 @@ RSpec.describe 'User comments on a diff', :js do
# Show the comment.
page.within('.diff-files-holder > div:nth-child(3)') do
- find('.js-btn-vue-toggle-comments').click
+ find('.js-diff-more-actions').click
+ click_button 'Show comments on this file'
end
# Now both the comments should be shown.
diff --git a/spec/features/merge_request/user_edits_mr_spec.rb b/spec/features/merge_request/user_edits_mr_spec.rb
index 397ca70f4a1..817b4e0b48e 100644
--- a/spec/features/merge_request/user_edits_mr_spec.rb
+++ b/spec/features/merge_request/user_edits_mr_spec.rb
@@ -21,24 +21,6 @@ RSpec.describe 'Merge request > User edits MR' do
it_behaves_like 'an editable merge request'
end
- context 'when merge_request_reviewers is turned on' do
- before do
- stub_feature_flags(merge_request_reviewers: true)
- end
-
- context 'non-fork merge request' do
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request with reviewers'
- end
-
- context 'for a forked project' do
- let(:source_project) { fork_project(target_project, nil, repository: true) }
-
- include_context 'merge request edit context'
- it_behaves_like 'an editable merge request with reviewers'
- end
- end
-
context 'when merge_request_reviewers is turned off' do
before do
stub_feature_flags(merge_request_reviewers: false)
diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb
index 0e39cce13a1..0cdc87de761 100644
--- a/spec/features/merge_request/user_expands_diff_spec.rb
+++ b/spec/features/merge_request/user_expands_diff_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'User expands diff', :js do
let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) }
before do
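+ # Disable increased diff limits so the file below is rendered collapsed and can be expanded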
+ stub_feature_flags(increased_diff_limits: false)
visit(diffs_project_merge_request_path(project, merge_request))
wait_for_requests
@@ -14,11 +15,11 @@ RSpec.describe 'User expands diff', :js do
it 'allows user to expand diff' do
page.within find('[id="19763941ab80e8c09871c0a425f0560d9053bcb3"]') do
- find('[data-testid="expandButton"]').click
+ click_link 'Click to expand it.'
wait_for_requests
- expect(page).not_to have_content('Expand File')
+ expect(page).not_to have_content('Click to expand it.')
expect(page).to have_selector('.code')
end
end
diff --git a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
new file mode 100644
index 00000000000..f5bca7cf015
--- /dev/null
+++ b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User marks merge request as draft', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'toggles draft status' do
+ click_link 'Mark as draft'
+
+ expect(page).to have_content("Draft: #{merge_request.title}")
+
+ page.within('.detail-page-header-actions') do
+ click_link 'Mark as ready'
+ end
+
+ expect(page).to have_content(merge_request.title)
+ end
+end
diff --git a/spec/features/merge_request/user_reopens_merge_request_spec.rb b/spec/features/merge_request/user_reopens_merge_request_spec.rb
index 7866ece84ac..4a05a3be59a 100644
--- a/spec/features/merge_request/user_reopens_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reopens_merge_request_spec.rb
@@ -15,7 +15,11 @@ RSpec.describe 'User reopens a merge requests', :js do
end
it 'reopens a merge request' do
- click_button('Reopen merge request', match: :first)
+ find('.js-issuable-close-dropdown .dropdown-toggle').click
+
+ click_link('Reopen merge request', match: :first)
+
+ wait_for_requests
page.within('.status-box') do
expect(page).to have_content('Open')
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
index a9d4c4df507..b67167252e1 100644
--- a/spec/features/merge_request/user_resolves_wip_mr_spec.rb
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -35,7 +35,9 @@ RSpec.describe 'Merge request > User resolves Work in Progress', :js do
expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}")
expect(page).to have_content "This merge request is still a work in progress."
- click_button('Mark as ready')
+ page.within('.mr-state-widget') do
+ click_button('Mark as ready')
+ end
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 7a3a14e61e3..a7713ed9964 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'Merge request > User sees diff', :js do
visit diffs_project_merge_request_path(project, merge_request)
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
- expect(page).to have_selector("[id=\"#{changelog_id}\"] a.js-edit-blob")
+ expect(page).to have_selector("[id=\"#{changelog_id}\"] .js-edit-blob", visible: false)
end
end
@@ -73,6 +73,7 @@ RSpec.describe 'Merge request > User sees diff', :js do
visit diffs_project_merge_request_path(project, merge_request)
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
+ find("[id=\"#{changelog_id}\"] .js-diff-more-actions").click
find("[id=\"#{changelog_id}\"] .js-edit-blob").click
expect(page).to have_selector('.js-fork-suggestion-button', count: 1)
diff --git a/spec/features/merge_request/user_sees_page_metadata_spec.rb b/spec/features/merge_request/user_sees_page_metadata_spec.rb
new file mode 100644
index 00000000000..7b3e07152a0
--- /dev/null
+++ b/spec/features/merge_request/user_sees_page_metadata_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User sees page metadata' do
+ let(:merge_request) { create(:merge_request, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
+ let(:project) { merge_request.target_project }
+ let(:user) { project.creator }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 8e15ba6cf8d..107fc002ebd 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
wait_for_requests
- expect(page.find('.js-run-mr-pipeline')).to have_text('Run Pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run Pipeline')
end
end
@@ -66,7 +66,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
wait_for_requests
- expect(page.find('.js-run-mr-pipeline')).to have_text('Run Pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run Pipeline')
end
end
end
diff --git a/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
new file mode 100644
index 00000000000..93807512d9c
--- /dev/null
+++ b/spec/features/merge_request/user_sees_suggest_pipeline_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > User sees suggest pipeline', :js do
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.source_project }
+ let(:user) { project.creator }
+
+ before do
+ stub_application_setting(auto_devops_enabled: false)
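+ # Enable the suggest_pipeline experiment both globally and for this user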
+ stub_experiment(suggest_pipeline: true)
+ stub_experiment_for_user(suggest_pipeline: true)
+ project.add_maintainer(user)
+ sign_in(user)
+ visit project_merge_request_path(project, merge_request)
+ end
+
+ it 'shows the suggest pipeline widget and then allows dismissal correctly' do
+ expect(page).to have_content('Are you adding technical debt or code vulnerabilities?')
+
+ page.within '.mr-pipeline-suggest' do
+ find('[data-testid="close"]').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Are you adding technical debt or code vulnerabilities?')
+
+ # Reload so we know the user callout was registered
+ visit page.current_url
+
+ expect(page).not_to have_content('Are you adding technical debt or code vulnerabilities?')
+ end
+end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 39495832547..abdbd3a6f8a 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -119,7 +119,8 @@ RSpec.describe 'User comments on a diff', :js do
it 'can add and remove suggestions from a batch' do
files.each_with_index do |file, index|
page.within("[id='#{file[:hash]}']") do
- find("button[title='Show full file']").click
+ find('.js-diff-more-actions').click
+ click_button 'Show full file'
wait_for_requests
click_diff_line(find("[id='#{file[:line_code]}']"))
@@ -130,7 +131,9 @@ RSpec.describe 'User comments on a diff', :js do
wait_for_requests
end
end
+ end
+ files.each_with_index do |file, index|
page.within("[id='#{file[:hash]}']") do
expect(page).not_to have_content('Applied')
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index 448844ae57d..e8998f9457a 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -22,7 +22,24 @@ RSpec.describe 'User views an open merge request' do
# returns the whole document, not the node's actual parent element
expect(find(:xpath, "#{node.path}/..").text).to eq(merge_request.description[2..-1])
- expect(page).to have_content(merge_request.title).and have_content(merge_request.description)
+ expect(page).to have_content(merge_request.title)
+ end
+
+ it 'has reviewers in sidebar' do
+ expect(page).to have_css('.reviewer')
+ end
+ end
+
+ context 'when merge_request_reviewers is turned off' do
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ stub_feature_flags(merge_request_reviewers: false)
+ visit(merge_request_path(merge_request))
+ end
+
+ it 'does not have reviewers in sidebar' do
+ expect(page).not_to have_css('.reviewer')
end
end
diff --git a/spec/features/milestones/user_views_milestone_spec.rb b/spec/features/milestones/user_views_milestone_spec.rb
index 420f8d49483..9c19f842427 100644
--- a/spec/features/milestones/user_views_milestone_spec.rb
+++ b/spec/features/milestones/user_views_milestone_spec.rb
@@ -4,15 +4,27 @@ require 'spec_helper'
RSpec.describe "User views milestone" do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:milestone) { create(:milestone, project: project, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
let_it_be(:labels) { create_list(:label, 2, project: project) }
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
sign_in(user)
end
+ context 'page description' do
+ before do
+ visit(project_milestone_path(project, milestone))
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
+
it "avoids N+1 database queries" do
issue_params = { project: project, assignees: [user], author: user, milestone: milestone, labels: labels }.freeze
@@ -25,7 +37,7 @@ RSpec.describe "User views milestone" do
expect { visit_milestone }.not_to exceed_query_limit(control)
end
- context 'limiting milestone issues' do
+ context 'issues list', :js do
before_all do
2.times do
create(:issue, milestone: milestone, project: project)
@@ -34,6 +46,28 @@ RSpec.describe "User views milestone" do
end
end
+ context 'for a project milestone' do
+ it 'does not show the project name' do
+ visit(project_milestone_path(project, milestone))
+
+ wait_for_requests
+
+ expect(page.find('#tab-issues')).not_to have_text(project.name)
+ end
+ end
+
+ context 'for a group milestone' do
+ let(:group_milestone) { create(:milestone, group: group) }
+
+ it 'shows the project name' do
+ create(:issue, project: project, milestone: group_milestone)
+
+ visit(group_milestone_path(group, group_milestone))
+
+ expect(page.find('#tab-issues')).to have_text(project.name)
+ end
+ end
+
context 'when issues on milestone are over DISPLAY_ISSUES_LIMIT' do
it "limits issues to display and shows warning" do
stub_const('Milestoneish::DISPLAY_ISSUES_LIMIT', 3)
@@ -56,6 +90,40 @@ RSpec.describe "User views milestone" do
end
end
+ context 'merge requests list', :js do
+ context 'for a project milestone' do
+ it 'does not show the project name' do
+ create(:merge_request, source_project: project, milestone: milestone)
+
+ visit(project_milestone_path(project, milestone))
+
+ within('.js-milestone-tabs') do
+ click_link('Merge Requests')
+ end
+
+ wait_for_requests
+
+ expect(page.find('#tab-merge-requests')).not_to have_text(project.name)
+ end
+ end
+
+ context 'for a group milestone' do
+ let(:group_milestone) { create(:milestone, group: group) }
+
+ it 'shows the project name' do
+ create(:merge_request, source_project: project, milestone: group_milestone)
+
+ visit(group_milestone_path(group, group_milestone))
+
+ within('.js-milestone-tabs') do
+ click_link('Merge Requests')
+ end
+
+ expect(page.find('#tab-merge-requests')).to have_text(project.name)
+ end
+ end
+ end
+
private
def visit_milestone
diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb
index 3f606577121..f8b4b802a60 100644
--- a/spec/features/milestones/user_views_milestones_spec.rb
+++ b/spec/features/milestones/user_views_milestones_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe "User views milestones" do
.and have_content("Merge Requests")
end
- context "with issues" do
+ context "with issues", :js do
let_it_be(:issue) { create(:issue, project: project, milestone: milestone) }
let_it_be(:closed_issue) { create(:closed_issue, project: project, milestone: milestone) }
@@ -33,7 +33,6 @@ RSpec.describe "User views milestones" do
.and have_selector("#tab-issues li.issuable-row", count: 2)
.and have_content(issue.title)
.and have_content(closed_issue.title)
- .and have_selector("#tab-merge-requests")
end
end
diff --git a/spec/features/operations_sidebar_link_spec.rb b/spec/features/operations_sidebar_link_spec.rb
new file mode 100644
index 00000000000..32e2833dafb
--- /dev/null
+++ b/spec/features/operations_sidebar_link_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Operations dropdown sidebar' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+
+ before do
+ project.add_role(user, role)
+ sign_in(user)
+ visit project_issues_path(project)
+ end
+
+ context 'user has guest role' do
+ let(:role) { :guest }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+
+ expect(page).not_to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).not_to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).not_to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+ expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
+ expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+
+ context 'user has reporter role' do
+ let(:role) { :reporter }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+ expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+
+ expect(page).not_to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link(title: 'Logs', href: project_logs_path(project))
+ expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+
+ context 'user has developer role' do
+ let(:role) { :developer }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+ expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+ expect(page).to have_link(title: 'Logs', href: project_logs_path(project))
+
+ expect(page).not_to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).not_to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+
+ context 'user has maintainer role' do
+ let(:role) { :maintainer }
+
+ it 'has the correct `Operations` menu items' do
+ expect(page).to have_link(title: 'Metrics', href: project_metrics_dashboard_path(project))
+ expect(page).to have_link(title: 'Alerts', href: project_alert_management_index_path(project))
+ expect(page).to have_link(title: 'Incidents', href: project_incidents_path(project))
+ expect(page).to have_link(title: 'Environments', href: project_environments_path(project))
+ expect(page).to have_link(title: 'Error Tracking', href: project_error_tracking_index_path(project))
+ expect(page).to have_link(title: 'Product Analytics', href: project_product_analytics_path(project))
+ expect(page).to have_link(title: 'Serverless', href: project_serverless_functions_path(project))
+ expect(page).to have_link(title: 'Logs', href: project_logs_path(project))
+ expect(page).to have_link(title: 'Kubernetes', href: project_clusters_path(project))
+ end
+ end
+end
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index b5e784a749f..23bbe9c1587 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -71,21 +71,35 @@ RSpec.describe 'Profile > SSH Keys' do
expect(page).to have_content(key.title)
end
- it 'User removes a key via the key index' do
- create(:key, user: user)
- visit profile_keys_path
+ describe 'User removes a key', :js do
+ shared_examples 'removes key' do
+ it 'removes key' do
+ visit path
+ click_button('Delete')
- click_link('Remove')
+ page.within('.modal') do
+ page.click_button('Delete')
+ end
- expect(page).to have_content('Your SSH keys (0)')
- end
+ expect(page).to have_content('Your SSH keys (0)')
+ end
+ end
- it 'User removes a key via its details page' do
- key = create(:key, user: user)
- visit profile_key_path(key)
+ context 'via the key index' do
+ before do
+ create(:key, user: user)
+ end
+
+ let(:path) { profile_keys_path }
- click_link('Remove')
+ it_behaves_like 'removes key'
+ end
- expect(page).to have_content('Your SSH keys (0)')
+ context 'via its details page' do
+ let(:key) { create(:key, user: user) }
+ let(:path) { profile_key_path(key) }
+
+ it_behaves_like 'removes key'
+ end
end
end
diff --git a/spec/features/projects/activity/user_sees_design_comment_spec.rb b/spec/features/projects/activity/user_sees_design_comment_spec.rb
index e60deba65f0..3a8e2790858 100644
--- a/spec/features/projects/activity/user_sees_design_comment_spec.rb
+++ b/spec/features/projects/activity/user_sees_design_comment_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'Projects > Activity > User sees design comment', :js do
let_it_be(:design) { create(:design, issue: issue) }
let(:design_activity) do
- "#{commenter.name} #{commenter.to_reference} commented on design"
+ "#{commenter.name} #{commenter.to_reference} commented on design #{design.to_reference}"
end
let(:issue_activity) do
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 5aca994f53e..90d9263b515 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -20,9 +20,14 @@ RSpec.describe 'Editing file blob', :js do
sign_in(user)
end
- def edit_and_commit(commit_changes: true)
+ def edit_and_commit(commit_changes: true, is_diff: false)
wait_for_requests
- find('.js-edit-blob').click
+
+ if is_diff
+ first('.js-diff-more-actions').click
+ end
+
+ first('.js-edit-blob').click
fill_editor(content: 'class NextFeature\\nend\\n')
if commit_changes
@@ -38,7 +43,7 @@ RSpec.describe 'Editing file blob', :js do
context 'from MR diff' do
before do
visit diffs_project_merge_request_path(project, merge_request)
- edit_and_commit
+ edit_and_commit(is_diff: true)
end
it 'returns me to the mr' do
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
index 8b43687c71c..023e00a3e02 100644
--- a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled
describe 'viewing the new blob page' do
before do
- stub_feature_flags(suggest_pipeline: true)
+ stub_experiment_for_user(suggest_pipeline: true)
sign_in(user)
end
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 0e2444c5434..4224fdbc1fc 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -21,11 +21,11 @@ RSpec.describe 'Branches' do
before do
# Add 4 stale branches
(1..4).reverse_each do |i|
- Timecop.freeze((threshold + i).ago) { create_file(message: "a commit in stale-#{i}", branch_name: "stale-#{i}") }
+ travel_to((threshold + i).ago) { create_file(message: "a commit in stale-#{i}", branch_name: "stale-#{i}") }
end
# Add 6 active branches
(1..6).each do |i|
- Timecop.freeze((threshold - i).ago) { create_file(message: "a commit in active-#{i}", branch_name: "active-#{i}") }
+ travel_to((threshold - i).ago) { create_file(message: "a commit in active-#{i}", branch_name: "active-#{i}") }
end
end
diff --git a/spec/features/projects/clusters/eks_spec.rb b/spec/features/projects/clusters/eks_spec.rb
index c5feef6c6f3..9f3f331cfab 100644
--- a/spec/features/projects/clusters/eks_spec.rb
+++ b/spec/features/projects/clusters/eks_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'AWS EKS Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
end
context 'when user creates a cluster on AWS EKS' do
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 04339d20d77..a0519d88532 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Create new cluster'
click_link 'Google GKE'
end
@@ -143,7 +143,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Connect existing cluster'
end
@@ -162,7 +162,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
end
end
end
@@ -178,7 +178,7 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
end
it 'user sees offer on cluster create page' do
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
expect(page).to have_css('.gcp-signup-offer')
end
@@ -192,10 +192,10 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
it 'user does not see offer after dismissing' do
expect(page).to have_css('.gcp-signup-offer')
- find('.gcp-signup-offer .close').click
+ find('.gcp-signup-offer .js-close').click
wait_for_requests
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
expect(page).not_to have_css('.gcp-signup-offer')
end
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 9d0dc65093e..748eba558aa 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User Cluster', :js do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Connect existing cluster'
end
@@ -52,6 +52,10 @@ RSpec.describe 'User Cluster', :js do
it 'user sees RBAC is enabled by default' do
expect(page).to have_checked_field('RBAC-enabled cluster')
end
+
+ it 'user sees namespace per environment is enabled by default' do
+ expect(page).to have_checked_field('Namespace per environment')
+ end
end
context 'when user filled form with invalid parameters' do
@@ -112,7 +116,7 @@ RSpec.describe 'User Cluster', :js do
it 'user sees creation form with the successful message' do
expect(page).to have_content('Kubernetes cluster integration was successfully removed.')
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
end
end
end
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index d674fbc457e..6c6e65005f6 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -11,7 +11,6 @@ RSpec.describe 'Clusters', :js do
before do
project.add_maintainer(user)
gitlab_sign_in(user)
- stub_feature_flags(clusters_list_redesign: false)
end
context 'when user does not have a cluster and visits cluster index page' do
@@ -20,7 +19,7 @@ RSpec.describe 'Clusters', :js do
end
it 'sees empty state' do
- expect(page).to have_link('Add Kubernetes cluster')
+ expect(page).to have_link('Integrate with a cluster certificate')
expect(page).to have_selector('.empty-state')
end
end
@@ -42,7 +41,7 @@ RSpec.describe 'Clusters', :js do
context 'when user filled form with environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Connect existing cluster'
fill_in 'cluster_name', with: 'staging-cluster'
fill_in 'cluster_environment_scope', with: 'staging/*'
@@ -71,7 +70,7 @@ RSpec.describe 'Clusters', :js do
context 'when user updates duplicated environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Connect existing cluster'
fill_in 'cluster_name', with: 'staging-cluster'
fill_in 'cluster_environment_scope', with: '*'
@@ -117,7 +116,7 @@ RSpec.describe 'Clusters', :js do
context 'when user filled form with environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Create new cluster'
click_link 'Google GKE'
@@ -162,7 +161,7 @@ RSpec.describe 'Clusters', :js do
context 'when user updates duplicated environment scope' do
before do
- click_link 'Add Kubernetes cluster'
+ click_link 'Connect cluster with certificate'
click_link 'Create new cluster'
click_link 'Google GKE'
@@ -196,8 +195,7 @@ RSpec.describe 'Clusters', :js do
end
it 'user sees a table with one cluster' do
- # One is the header row, the other the cluster row
- expect(page).to have_selector('.gl-responsive-table-row', count: 2)
+ expect(page).to have_selector('[data-testid="cluster_list_table"] tbody tr', count: 1)
end
context 'when user clicks on a cluster' do
@@ -216,7 +214,7 @@ RSpec.describe 'Clusters', :js do
before do
visit project_clusters_path(project)
- click_link 'Add Kubernetes cluster'
+ click_link 'Integrate with a cluster certificate'
click_link 'Create new cluster'
end
diff --git a/spec/features/projects/commit/user_comments_on_commit_spec.rb b/spec/features/projects/commit/user_comments_on_commit_spec.rb
index 87a022d74a3..0fa4975bb25 100644
--- a/spec/features/projects/commit/user_comments_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_comments_on_commit_spec.rb
@@ -6,19 +6,22 @@ RSpec.describe "User comments on commit", :js do
include Spec::Support::Helpers::Features::NotesHelpers
include RepoHelpers
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
let(:comment_text) { "XML attached" }
- before do
- sign_in(user)
+ before_all do
project.add_developer(user)
+ end
- visit(project_commit_path(project, sample_commit.id))
+ before do
+ sign_in(user)
end
context "when adding new comment" do
it "adds comment" do
+ visit(project_commit_path(project, sample_commit.id))
+
emoji_code = ":+1:"
page.within(".js-main-target-form") do
@@ -57,6 +60,8 @@ RSpec.describe "User comments on commit", :js do
context "when editing comment" do
before do
+ visit(project_commit_path(project, sample_commit.id))
+
add_note(comment_text)
end
@@ -87,6 +92,8 @@ RSpec.describe "User comments on commit", :js do
context "when deleting comment" do
before do
+ visit(project_commit_path(project, sample_commit.id))
+
add_note(comment_text)
end
@@ -108,4 +115,35 @@ RSpec.describe "User comments on commit", :js do
expect(page).not_to have_css(".note")
end
end
+
+ context 'when checking task lists' do
+ let(:note_with_task) do
+ <<-EOT.strip_heredoc
+
+ - [ ] Task 1
+ EOT
+ end
+
+ before do
+ create(:note_on_commit, project: project, commit_id: sample_commit.id, note: note_with_task, author: user)
+ create(:note_on_commit, project: project, commit_id: sample_commit.id, note: note_with_task, author: user)
+
+ visit(project_commit_path(project, sample_commit.id))
+ end
+
+ it 'allows the tasks to be checked' do
+ expect(page).to have_selector('li.task-list-item', count: 2)
+ expect(page).to have_selector('li.task-list-item input[checked]', count: 0)
+
+ all('.task-list-item-checkbox').each do |checkbox|
+ checkbox.click
+ end
+ wait_for_requests
+
+ visit(project_commit_path(project, sample_commit.id))
+
+ expect(page).to have_selector('li.task-list-item', count: 2)
+ expect(page).to have_selector('li.task-list-item input[checked]', count: 2)
+ end
+ end
end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index 7f2ef61bcbe..8c032660726 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -372,7 +372,7 @@ RSpec.describe 'Environments page', :js do
let(:role) { :developer }
it 'developer creates a new environment with a valid name' do
- within(".top-area") { click_link 'New environment' }
+ within(".environments-section") { click_link 'New environment' }
fill_in('Name', with: 'production')
click_on 'Save'
@@ -380,7 +380,7 @@ RSpec.describe 'Environments page', :js do
end
it 'developer creates a new environment with invalid name' do
- within(".top-area") { click_link 'New environment' }
+ within(".environments-section") { click_link 'New environment' }
fill_in('Name', with: 'name,with,commas')
click_on 'Save'
diff --git a/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb b/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
index 8d5e99d7e2b..78fb470d4ea 100644
--- a/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
+++ b/spec/features/projects/issues/design_management/user_links_to_designs_in_issue_spec.rb
@@ -90,34 +90,5 @@ RSpec.describe 'viewing issues with design references' do
expect(page).not_to have_link(design_ref_b)
end
end
-
- context 'design management is enabled, but the filter is disabled globally' do
- before do
- enable_design_management
- stub_feature_flags(
- Banzai::Filter::DesignReferenceFilter::FEATURE_FLAG => false
- )
- end
-
- it 'processes design tab links successfully, and design references as issue references', :aggregate_failures do
- visit_page_with_design_references
-
- expect(page).to have_text('The designs I mentioned')
- expect(page).to have_link(design_tab_ref)
- expect(page).to have_link(issue_ref)
- expect(page).not_to have_link(design_ref_a)
- expect(page).not_to have_link(design_ref_b)
- end
- end
-
- context 'design management is enabled, and the filter is enabled for the current project' do
- before do
- stub_feature_flags(
- Banzai::Filter::DesignReferenceFilter::FEATURE_FLAG => public_project
- )
- end
-
- it_behaves_like 'successful use of design link references'
- end
end
end
diff --git a/spec/features/projects/issues/viewing_relocated_issues_spec.rb b/spec/features/projects/issues/viewing_relocated_issues_spec.rb
new file mode 100644
index 00000000000..10d5ad1747c
--- /dev/null
+++ b/spec/features/projects/issues/viewing_relocated_issues_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'issues canonical link' do
+ include Spec::Support::Helpers::Features::CanonicalLinkHelpers
+
+ let_it_be(:original_project) { create(:project, :public) }
+ let_it_be(:original_issue) { create(:issue, project: original_project) }
+ let_it_be(:canonical_issue) { create(:issue) }
+ let_it_be(:canonical_url) { issue_url(canonical_issue, Gitlab::Application.routes.default_url_options) }
+
+ it "doesn't show the canonical URL" do
+ visit(issue_path(original_issue))
+
+ expect(page).not_to have_any_canonical_links
+ end
+
+ context 'when the issue was moved' do
+ it 'shows the canonical URL' do
+ original_issue.moved_to = canonical_issue
+ original_issue.save!
+
+ visit(issue_path(original_issue))
+
+ expect(page).to have_canonical_link(canonical_url)
+ end
+ end
+
+ context 'when the issue was duplicated' do
+ it 'shows the canonical URL' do
+ original_issue.duplicated_to = canonical_issue
+ original_issue.save!
+
+ visit(issue_path(original_issue))
+
+ expect(page).to have_canonical_link(canonical_url)
+ end
+ end
+end
diff --git a/spec/features/projects/members/groups_with_access_list_spec.rb b/spec/features/projects/members/groups_with_access_list_spec.rb
index 2ee6bc103e9..d652f6715db 100644
--- a/spec/features/projects/members/groups_with_access_list_spec.rb
+++ b/spec/features/projects/members/groups_with_access_list_spec.rb
@@ -3,20 +3,23 @@
require 'spec_helper'
RSpec.describe 'Projects > Members > Groups with access list', :js do
- let(:user) { create(:user) }
- let(:group) { create(:group, :public) }
- let(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public) }
+
+ let(:additional_link_attrs) { {} }
+ let!(:group_link) { create(:project_group_link, project: project, group: group, **additional_link_attrs) }
before do
- project.add_maintainer(user)
- @group_link = create(:project_group_link, project: project, group: group)
+ travel_to Time.now.utc.beginning_of_day
+ project.add_maintainer(user)
sign_in(user)
visit project_project_members_path(project)
end
it 'updates group access level' do
- click_button @group_link.human_access
+ click_button group_link.human_access
page.within '.dropdown-menu' do
click_link 'Guest'
@@ -30,14 +33,32 @@ RSpec.describe 'Projects > Members > Groups with access list', :js do
end
it 'updates expiry date' do
- tomorrow = Date.today + 3
+ expires_at_field = "member_expires_at_#{group.id}"
+ fill_in expires_at_field, with: 3.days.from_now.to_date
- fill_in "member_expires_at_#{group.id}", with: tomorrow.strftime("%F")
- find('body').click
+ find_field(expires_at_field).native.send_keys :enter
wait_for_requests
page.within(find('li.group_member')) do
- expect(page).to have_content('Expires in')
+ expect(page).to have_content('Expires in 3 days')
+ end
+ end
+
+ context 'when link has expiry date set' do
+ let(:additional_link_attrs) { { expires_at: 3.days.from_now.to_date } }
+
+ it 'clears expiry date' do
+ page.within(find('li.group_member')) do
+ expect(page).to have_content('Expires in 3 days')
+
+ page.within(find('.js-edit-member-form')) do
+ find('.js-clear-input').click
+ end
+
+ wait_for_requests
+
+ expect(page).not_to have_content('Expires in')
+ end
end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index f59dc5dd074..c8c3a52d427 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -172,10 +172,17 @@ RSpec.describe 'Pipeline', :js do
end
end
- it_behaves_like 'showing user status' do
- let(:user_with_status) { pipeline.user }
+ describe 'pipelines details view' do
+ let!(:status) { create(:user_status, user: pipeline.user, emoji: 'smirk', message: 'Authoring this object') }
- subject { visit project_pipeline_path(project, pipeline) }
+ it 'pipeline header shows the user status and emoji' do
+ visit project_pipeline_path(project, pipeline)
+
+ within '[data-testid="pipeline-header-content"]' do
+ expect(page).to have_selector("[data-testid='#{status.message}']")
+ expect(page).to have_selector("[data-name='#{status.emoji}']")
+ end
+ end
end
describe 'pipeline graph' do
@@ -400,7 +407,7 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
- find('[data-testid="retryButton"]').click
+ find('[data-testid="retryPipeline"]').click
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
@@ -902,7 +909,7 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
- find('[data-testid="retryButton"]').click
+ find('[data-testid="retryPipeline"]').click
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
diff --git a/spec/features/projects/releases/user_creates_release_spec.rb b/spec/features/projects/releases/user_creates_release_spec.rb
index 5d05a7e4c91..a440a4211e3 100644
--- a/spec/features/projects/releases/user_creates_release_spec.rb
+++ b/spec/features/projects/releases/user_creates_release_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'User creates release', :js do
project.add_developer(user)
- gitlab_sign_in(user)
+ sign_in(user)
visit new_page_url
@@ -108,6 +108,24 @@ RSpec.describe 'User creates release', :js do
end
end
+ context 'when the release notes "Preview" tab is clicked' do
+ before do
+ find_field('Release notes').click
+
+ fill_release_notes('**some** _markdown_ [content](https://example.com)')
+
+ click_on 'Preview'
+
+ wait_for_all_requests
+ end
+
+ it 'renders a preview of the release notes markdown' do
+ within('[data-testid="release-notes"]') do
+ expect(page).to have_text('some markdown content')
+ end
+ end
+ end
+
def fill_out_form_and_submit
fill_tag_name(tag_name)
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index 4ed1be6db6b..976840ee4e7 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'User edits Release', :js do
project.add_developer(user)
- gitlab_sign_in(user)
+ sign_in(user)
visit edit_project_release_path(project, release)
diff --git a/spec/features/projects/releases/user_views_release_spec.rb b/spec/features/projects/releases/user_views_release_spec.rb
index c82588746a8..4410f345e56 100644
--- a/spec/features/projects/releases/user_views_release_spec.rb
+++ b/spec/features/projects/releases/user_views_release_spec.rb
@@ -4,17 +4,25 @@ require 'spec_helper'
RSpec.describe 'User views Release', :js do
let(:project) { create(:project, :repository) }
- let(:release) { create(:release, project: project, name: 'The first release' ) }
let(:user) { create(:user) }
+ let(:release) do
+ create(:release,
+ project: project,
+ name: 'The first release',
+ description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)')
+ end
+
before do
project.add_developer(user)
- gitlab_sign_in(user)
+ sign_in(user)
visit project_release_path(project, release)
end
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+
it 'renders the breadcrumbs' do
within('.breadcrumbs') do
expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name}")
@@ -31,7 +39,7 @@ RSpec.describe 'User views Release', :js do
expect(page).to have_content(release.name)
expect(page).to have_content(release.tag)
expect(page).to have_content(release.commit.short_id)
- expect(page).to have_content(release.description)
+ expect(page).to have_content('Lorem ipsum dolor sit amet')
end
end
end
diff --git a/spec/features/projects/releases/user_views_releases_spec.rb b/spec/features/projects/releases/user_views_releases_spec.rb
index 993d3371904..3bf472e82ec 100644
--- a/spec/features/projects/releases/user_views_releases_spec.rb
+++ b/spec/features/projects/releases/user_views_releases_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'User views releases', :js do
shared_examples 'releases page' do
context('when the user is a maintainer') do
before do
- gitlab_sign_in(maintainer)
+ sign_in(maintainer)
end
it 'sees the release' do
@@ -110,7 +110,7 @@ RSpec.describe 'User views releases', :js do
context('when the user is a guest') do
before do
- gitlab_sign_in(guest)
+ sign_in(guest)
end
it 'renders release info except for Git-related data' do
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index 9d9a75c22be..d444ea27d35 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -18,7 +18,9 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
click_notifications_button
click_link 'On mention'
- wait_for_requests
+ page.within('.notification-dropdown') do
+ expect(page).not_to have_css('.gl-spinner')
+ end
click_notifications_button
expect(find('.update-notification.is-active')).to have_content('On mention')
@@ -30,7 +32,9 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
click_notifications_button
click_link 'Disabled'
- wait_for_requests
+ page.within('.notification-dropdown') do
+ expect(page).not_to have_css('.gl-spinner')
+ end
expect(page).to have_css('.notifications-icon[data-testid="notifications-off-icon"]')
end
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 81736fefae9..afa9de5ce86 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -226,7 +226,7 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
expect(project.repository.gitlab_ci_yml).to be_nil
page.within('.project-buttons') do
- expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_ide_path)
+ expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_path)
end
end
diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb
index bd2af66710a..ca9e0a23888 100644
--- a/spec/features/projects/tree/tree_show_spec.rb
+++ b/spec/features/projects/tree/tree_show_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe 'Projects tree', :js do
# Check last commit
expect(find('.commit-content').text).to include(message)
- expect(find('.commit-sha-group').text).to eq(short_newrev)
+ expect(find('.js-commit-sha-group').text).to eq(short_newrev)
end
end
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index 50d7b353c46..040e8741b6e 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -145,11 +145,11 @@ RSpec.describe 'Projects > User sees sidebar' do
expect(page).to have_content 'Project'
expect(page).to have_content 'Issues'
expect(page).to have_content 'Wiki'
+ expect(page).to have_content 'Operations'
expect(page).not_to have_content 'Repository'
expect(page).not_to have_content 'CI / CD'
expect(page).not_to have_content 'Merge Requests'
- expect(page).not_to have_content 'Operations'
end
end
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
deleted file mode 100644
index 8f2fb9e827c..00000000000
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ /dev/null
@@ -1,168 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Wiki > User previews markdown changes', :js do
- let_it_be(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'home', content: '[some link](other-page)') }
- let(:wiki_content) do
- <<-HEREDOC
-Some text so key event for [ does not trigger an incorrect replacement.
-[regular link](regular)
-[relative link 1](../relative)
-[relative link 2](./relative)
-[relative link 3](./e/f/relative)
-[spaced link](title with spaces)
- HEREDOC
- end
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
- end
-
- context "while creating a new wiki page" do
- context "when there are no spaces or hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a/b/c/d', content: wiki_content)
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are spaces in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a page/b page/c page/d page', content: wiki_content)
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a-page/b-page/c-page/d-page', content: wiki_content)
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
- end
-
- context "while editing a wiki page" do
- context "when there are no spaces or hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a/b/c/d')
- click_link 'Edit'
-
- fill_in :wiki_content, with: wiki_content
- click_on "Preview"
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a/b/c/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are spaces in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a page/b page/c page/d page')
- click_link 'Edit'
-
- fill_in :wiki_content, with: wiki_content
- click_on "Preview"
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context "when there are hyphens in the page name" do
- it "rewrites relative links as expected" do
- create_wiki_page('a-page/b-page/c-page/d-page')
- click_link 'Edit'
-
- fill_in :wiki_content, with: wiki_content
- click_on "Preview"
-
- expect(page).to have_content("regular link")
-
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/regular\">regular link</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/relative\">relative link 1</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/relative\">relative link 2</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/a-page/b-page/c-page/e/f/relative\">relative link 3</a>")
- expect(page.html).to include("<a href=\"/#{project.full_path}/-/wikis/title%20with%20spaces\">spaced link</a>")
- end
- end
-
- context 'when rendering the preview' do
- it 'renders content with CommonMark' do
- create_wiki_page('a-page/b-page/c-page/common-mark')
- click_link 'Edit'
-
- fill_in :wiki_content, with: "1. one\n - sublist\n"
- click_on "Preview"
-
- # the above generates two separate lists (not embedded) in CommonMark
- expect(page).to have_content("sublist")
- expect(page).not_to have_xpath("//ol//li//ul")
- end
- end
- end
-
- it "does not linkify double brackets inside code blocks as expected" do
- wiki_content = <<-HEREDOC
- `[[do_not_linkify]]`
- ```
- [[also_do_not_linkify]]
- ```
- HEREDOC
-
- create_wiki_page('linkify_test', wiki_content)
-
- expect(page).to have_content("do_not_linkify")
-
- expect(page.html).to include('[[do_not_linkify]]')
- expect(page.html).to include('[[also_do_not_linkify]]')
- end
-
- private
-
- def create_wiki_page(path, content = 'content')
- visit project_wiki_path(project, wiki_page)
-
- click_link 'New page'
-
- fill_in :wiki_title, with: path
- fill_in :wiki_content, with: content
-
- click_button 'Create page'
- end
-end
diff --git a/spec/features/projects/wiki/shortcuts_spec.rb b/spec/features/projects/wiki/shortcuts_spec.rb
deleted file mode 100644
index 170e7afb51f..00000000000
--- a/spec/features/projects/wiki/shortcuts_spec.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Wiki shortcuts', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki, title: 'home', content: 'Home page') }
-
- before do
- sign_in(user)
- visit project_wiki_path(project, wiki_page)
- end
-
- it 'Visit edit wiki page using "e" keyboard shortcut' do
- find('body').native.send_key('e')
-
- expect(find('.wiki-page-title')).to have_content('Edit Page')
- end
-end
diff --git a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb b/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
deleted file mode 100644
index eba1b63765a..00000000000
--- a/spec/features/projects/wiki/user_creates_wiki_page_spec.rb
+++ /dev/null
@@ -1,360 +0,0 @@
-# frozen_string_literal: true
-
-require "spec_helper"
-
-RSpec.describe "User creates wiki page" do
- include WikiHelpers
-
- let(:user) { create(:user) }
- let(:wiki) { ProjectWiki.new(project, user) }
- let(:project) { create(:project) }
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
- end
-
- context "when wiki is empty" do
- before do |example|
- visit(project_wikis_path(project))
-
- wait_for_svg_to_be_loaded(example)
-
- click_link "Create your first page"
- end
-
- context "in a user namespace" do
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
-
- it "shows validation error message" do
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "")
-
- click_on("Create page")
- end
-
- expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
-
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "[link test](test)")
-
- click_on("Create page")
- end
-
- expect(page).to have_content("Home").and have_content("link test")
-
- click_link("link test")
-
- expect(page).to have_content("Create New Page")
- end
-
- it "shows non-escaped link in the pages list" do
- fill_in(:wiki_title, with: "one/two/three-test")
-
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "wiki content")
-
- click_on("Create page")
- end
-
- expect(current_path).to include("one/two/three-test")
- expect(page).to have_xpath("//a[@href='/#{project.full_path}/-/wikis/one/two/three-test']")
- end
-
- it "has `Create home` as a commit message", :js do
- wait_for_requests
-
- expect(page).to have_field("wiki[message]", with: "Create home")
- end
-
- it "creates a page from the home page" do
- fill_in(:wiki_content, with: "[test](test)\n[GitLab API doc](api)\n[Rake tasks](raketasks)\n# Wiki header\n")
- fill_in(:wiki_message, with: "Adding links to wiki")
-
- page.within(".wiki-form") do
- click_button("Create page")
- end
-
- expect(current_path).to eq(project_wiki_path(project, "home"))
- expect(page).to have_content("test GitLab API doc Rake tasks Wiki header")
- .and have_content("Home")
- .and have_content("Last edited by #{user.name}")
- .and have_header_with_correct_id_and_link(1, "Wiki header", "wiki-header")
-
- click_link("test")
-
- expect(current_path).to eq(project_wiki_path(project, "test"))
-
- page.within(:css, ".nav-text") do
- expect(page).to have_content("Create New Page")
- end
-
- click_link("Home")
-
- expect(current_path).to eq(project_wiki_path(project, "home"))
-
- click_link("GitLab API")
-
- expect(current_path).to eq(project_wiki_path(project, "api"))
-
- page.within(:css, ".nav-text") do
- expect(page).to have_content("Create")
- end
-
- click_link("Home")
-
- expect(current_path).to eq(project_wiki_path(project, "home"))
-
- click_link("Rake tasks")
-
- expect(current_path).to eq(project_wiki_path(project, "raketasks"))
-
- page.within(:css, ".nav-text") do
- expect(page).to have_content("Create")
- end
- end
-
- it "creates ASCII wiki with LaTeX blocks", :js do
- stub_application_setting(plantuml_url: "http://localhost", plantuml_enabled: true)
-
- ascii_content = <<~MD
- :stem: latexmath
-
- [stem]
- ++++
- \\sqrt{4} = 2
- ++++
-
- another part
-
- [latexmath]
- ++++
- \\beta_x \\gamma
- ++++
-
- stem:[2+2] is 4
- MD
-
- find("#wiki_format option[value=asciidoc]").select_option
-
- fill_in(:wiki_content, with: ascii_content)
-
- page.within(".wiki-form") do
- click_button("Create page")
- end
-
- page.within ".md" do
- expect(page).to have_selector(".katex", count: 3).and have_content("2+2 is 4")
- end
- end
-
- it 'creates a wiki page with Org markup', :aggregate_failures do
- org_content = <<~ORG
- * Heading
- ** Subheading
- [[home][Link to Home]]
- ORG
-
- page.within('.wiki-form') do
- find('#wiki_format option[value=org]').select_option
- fill_in(:wiki_content, with: org_content)
- click_button('Create page')
- end
-
- expect(page).to have_selector('h1', text: 'Heading')
- expect(page).to have_selector('h2', text: 'Subheading')
- expect(page).to have_link('Link to Home', href: "/#{project.full_path}/-/wikis/home")
- end
-
- it_behaves_like 'wiki file attachments'
- end
-
- context "in a group namespace", :js do
- let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
-
- it "has `Create home` as a commit message" do
- wait_for_requests
-
- expect(page).to have_field("wiki[message]", with: "Create home")
- end
-
- it "creates a page from the home page" do
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "My awesome wiki!")
-
- click_button("Create page")
- end
-
- expect(page).to have_content("Home")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
- end
- end
-
- context "when wiki is not empty", :js do
- before do
- create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page')
-
- visit(project_wikis_path(project))
- end
-
- context "in a user namespace" do
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
-
- context "via the `new wiki page` page" do
- it "creates a page with a single word" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "foo")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create foo")
-
- click_button("Create page")
-
- expect(page).to have_content("foo")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
-
- it "creates a page with spaces in the name" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "Spaces in the name")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create Spaces in the name")
-
- click_button("Create page")
-
- expect(page).to have_content("Spaces in the name")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
-
- it "creates a page with hyphens in the name" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "hyphens-in-the-name")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create hyphens in the name")
-
- page.within(".wiki-form") do
- fill_in(:wiki_content, with: "My awesome wiki!")
-
- click_button("Create page")
- end
-
- expect(page).to have_content("hyphens in the name")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
- end
-
- it "shows the emoji autocompletion dropdown" do
- click_link("New page")
-
- page.within(".wiki-form") do
- find("#wiki_content").native.send_keys("")
-
- fill_in(:wiki_content, with: ":")
- end
-
- expect(page).to have_selector(".atwho-view")
- end
- end
-
- context "in a group namespace" do
- let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
-
- context "via the `new wiki page` page" do
- it "creates a page" do
- click_link("New page")
-
- page.within(".wiki-form") do
- fill_in(:wiki_title, with: "foo")
- fill_in(:wiki_content, with: "My awesome wiki!")
- end
-
- # Commit message field should have correct value.
- expect(page).to have_field("wiki[message]", with: "Create foo")
-
- click_button("Create page")
-
- expect(page).to have_content("foo")
- .and have_content("Last edited by #{user.name}")
- .and have_content("My awesome wiki!")
- end
- end
- end
- end
-
- describe 'sidebar feature' do
- context 'when there are some existing pages' do
- before do
- create(:wiki_page, wiki: wiki, title: 'home', content: 'home')
- create(:wiki_page, wiki: wiki, title: 'another', content: 'another')
- end
-
- it 'renders a default sidebar when there is no customized sidebar' do
- visit(project_wikis_path(project))
-
- expect(page).to have_content('another')
- expect(page).not_to have_link('View All Pages')
- end
-
- context 'when there is a customized sidebar' do
- before do
- create(:wiki_page, wiki: wiki, title: '_sidebar', content: 'My customized sidebar')
- end
-
- it 'renders my customized sidebar instead of the default one' do
- visit(project_wikis_path(project))
-
- expect(page).to have_content('My customized sidebar')
- expect(page).not_to have_content('Another')
- end
- end
- end
-
- context 'when there are 15 existing pages' do
- before do
- (1..5).each { |i| create(:wiki_page, wiki: wiki, title: "my page #{i}") }
- (6..10).each { |i| create(:wiki_page, wiki: wiki, title: "parent/my page #{i}") }
- (11..15).each { |i| create(:wiki_page, wiki: wiki, title: "grandparent/parent/my page #{i}") }
- end
-
- it 'shows all pages in the sidebar' do
- visit(project_wikis_path(project))
-
- (1..15).each { |i| expect(page).to have_content("my page #{i}") }
- expect(page).not_to have_link('View All Pages')
- end
-
- context 'when there are more than 15 existing pages' do
- before do
- create(:wiki_page, wiki: wiki, title: 'my page 16')
- end
-
- it 'shows the first 15 pages in the sidebar' do
- visit(project_wikis_path(project))
-
- expect(page).to have_text('my page', count: 15)
- expect(page).to have_link('View All Pages')
- end
- end
- end
- end
-end
diff --git a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb b/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
deleted file mode 100644
index a5d865d581b..00000000000
--- a/spec/features/projects/wiki/user_deletes_wiki_page_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User deletes wiki page', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
- let(:wiki_page) { create(:wiki_page, wiki: project.wiki) }
-
- before do
- sign_in(user)
- visit(project_wiki_path(project, wiki_page))
- end
-
- it 'deletes a page' do
- click_on('Edit')
- click_on('Delete')
- find('.modal-footer .btn-danger').click
-
- expect(page).to have_content('Page was successfully deleted')
- end
-end
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
deleted file mode 100644
index fdab63a56b8..00000000000
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ /dev/null
@@ -1,263 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User updates wiki page' do
- include WikiHelpers
-
- let(:user) { create(:user) }
-
- before do
- project.add_maintainer(user)
- sign_in(user)
- end
-
- context 'when wiki is empty' do
- before do |example|
- visit(project_wikis_path(project))
-
- wait_for_svg_to_be_loaded(example)
-
- click_link "Create your first page"
- end
-
- context 'in a user namespace' do
- let(:project) { create(:project, :wiki_repo) }
-
- it 'redirects back to the home edit page' do
- page.within(:css, '.wiki-form .form-actions') do
- click_on('Cancel')
- end
-
- expect(current_path).to eq wiki_path(project.wiki)
- end
-
- it 'updates a page that has a path', :js do
- fill_in(:wiki_title, with: 'one/two/three-test')
-
- page.within '.wiki-form' do
- fill_in(:wiki_content, with: 'wiki content')
- click_on('Create page')
- end
-
- expect(current_path).to include('one/two/three-test')
- expect(find('.wiki-pages')).to have_content('three')
-
- first(:link, text: 'three').click
-
- expect(find('.nav-text')).to have_content('three')
-
- click_on('Edit')
-
- expect(current_path).to include('one/two/three-test')
- expect(page).to have_content('Edit Page')
-
- fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
-
- expect(page).to have_content('Updated Wiki Content')
- end
-
- it_behaves_like 'wiki file attachments'
- end
- end
-
- context 'when wiki is not empty' do
- let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
- let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, title: 'home', content: 'Home page') }
-
- before do
- visit(project_wikis_path(project))
-
- click_link('Edit')
- end
-
- context 'in a user namespace' do
- let(:project) { create(:project, :wiki_repo) }
-
- it 'updates a page', :js do
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
-
- fill_in(:wiki_content, with: 'My awesome wiki!')
- click_button('Save changes')
-
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
- end
-
- it 'updates the commit message as the title is changed', :js do
- fill_in(:wiki_title, with: '& < > \ \ { } &')
-
- expect(page).to have_field('wiki[message]', with: 'Update & < > \ \ { } &')
- end
-
- it 'correctly escapes the commit message entities', :js do
- fill_in(:wiki_title, with: 'Wiki title')
-
- expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
- end
-
- it 'shows a validation error message' do
- fill_in(:wiki_content, with: '')
- click_button('Save changes')
-
- expect(page).to have_selector('.wiki-form')
- expect(page).to have_content('Edit Page')
- expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content("Content can't be blank")
- expect(find('textarea#wiki_content').value).to eq('')
- end
-
- it 'shows the emoji autocompletion dropdown', :js do
- find('#wiki_content').native.send_keys('')
- fill_in(:wiki_content, with: ':')
-
- expect(page).to have_selector('.atwho-view')
- end
-
- it 'shows the error message' do
- wiki_page.update(content: 'Update')
-
- click_button('Save changes')
-
- expect(page).to have_content('Someone edited the page the same time you did.')
- end
-
- it 'updates a page' do
- fill_in('Content', with: 'Updated Wiki Content')
- click_on('Save changes')
-
- expect(page).to have_content('Updated Wiki Content')
- end
-
- it 'cancels editing of a page' do
- page.within(:css, '.wiki-form .form-actions') do
- click_on('Cancel')
- end
-
- expect(current_path).to eq(project_wiki_path(project, wiki_page))
- end
-
- it_behaves_like 'wiki file attachments'
- end
-
- context 'in a group namespace' do
- let(:project) { create(:project, :wiki_repo, namespace: create(:group, :public)) }
-
- it 'updates a page', :js do
- # Commit message field should have correct value.
- expect(page).to have_field('wiki[message]', with: 'Update home')
-
- fill_in(:wiki_content, with: 'My awesome wiki!')
-
- click_button('Save changes')
-
- expect(page).to have_content('Home')
- expect(page).to have_content("Last edited by #{user.name}")
- expect(page).to have_content('My awesome wiki!')
- end
-
- it_behaves_like 'wiki file attachments'
- end
- end
-
- context 'when the page is in a subdir' do
- let!(:project) { create(:project, :wiki_repo) }
- let(:project_wiki) { create(:project_wiki, project: project, user: project.creator) }
- let(:page_name) { 'page_name' }
- let(:page_dir) { "foo/bar/#{page_name}" }
- let!(:wiki_page) { create(:wiki_page, wiki: project_wiki, title: page_dir, content: 'Home page') }
-
- before do
- visit(project_wiki_edit_path(project, wiki_page))
- end
-
- it 'moves the page to the root folder' do
- fill_in(:wiki_title, with: "/#{page_name}")
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, page_name))
- end
-
- it 'moves the page to other dir' do
- new_page_dir = "foo1/bar1/#{page_name}"
-
- fill_in(:wiki_title, with: new_page_dir)
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, new_page_dir))
- end
-
- it 'remains in the same place if title has not changed' do
- original_path = project_wiki_path(project, wiki_page)
-
- fill_in(:wiki_title, with: page_name)
-
- click_button('Save changes')
-
- expect(current_path).to eq(original_path)
- end
-
- it 'can be moved to a different dir with a different name' do
- new_page_dir = "foo1/bar1/new_page_name"
-
- fill_in(:wiki_title, with: new_page_dir)
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, new_page_dir))
- end
-
- it 'can be renamed and moved to the root folder' do
- new_name = 'new_page_name'
-
- fill_in(:wiki_title, with: "/#{new_name}")
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, new_name))
- end
-
- it 'squishes the title before creating the page' do
- new_page_dir = " foo1 / bar1 / #{page_name} "
-
- fill_in(:wiki_title, with: new_page_dir)
-
- click_button('Save changes')
-
- expect(current_path).to eq(project_wiki_path(project, "foo1/bar1/#{page_name}"))
- end
-
- it_behaves_like 'wiki file attachments'
- end
-
- context 'when an existing page exceeds the content size limit' do
- let_it_be(:project) { create(:project, :wiki_repo) }
- let!(:wiki_page) { create(:wiki_page, wiki: project.wiki, content: "one\ntwo\nthree") }
-
- before do
- stub_application_setting(wiki_page_max_content_bytes: 10)
-
- visit wiki_page_path(wiki_page.wiki, wiki_page, action: :edit)
- end
-
- it 'allows changing the title if the content does not change' do
- fill_in 'Title', with: 'new title'
- click_on 'Save changes'
-
- expect(page).to have_content('Wiki was successfully updated.')
- end
-
- it 'shows a validation error when trying to change the content' do
- fill_in 'Content', with: 'new content'
- click_on 'Save changes'
-
- expect(page).to have_content('The form contains the following error:')
- expect(page).to have_content('Content is too long (11 Bytes). The maximum size is 10 Bytes.')
- end
- end
-end
diff --git a/spec/features/projects/wiki/user_views_wiki_empty_spec.rb b/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
index 0af40a2d760..1f460f39267 100644
--- a/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_empty_spec.rb
@@ -2,108 +2,86 @@
require 'spec_helper'
-RSpec.describe 'User views empty wiki' do
- let(:user) { create(:user) }
- let(:confluence_link) { 'Enable the Confluence Wiki integration' }
- let(:element) { page.find('.row.empty-state') }
-
- shared_examples 'empty wiki and accessible issues' do
- it 'show "issue tracker" message' do
- visit(project_wikis_path(project))
-
- expect(element).to have_content('This project has no wiki pages')
- expect(element).to have_content('You must be a project member')
- expect(element).to have_content('improve the wiki for this project')
- expect(element).to have_link("issue tracker", href: project_issues_path(project))
- expect(element).to have_link("Suggest wiki improvement", href: new_project_issue_path(project))
- expect(element).to have_no_link(confluence_link)
- end
- end
-
- shared_examples 'empty wiki and non-accessible issues' do
- it 'does not show "issue tracker" message' do
- visit(project_wikis_path(project))
+RSpec.describe 'Project > User views empty wiki' do
+ let_it_be(:user) { create(:user) }
- expect(element).to have_content('This project has no wiki pages')
- expect(element).to have_content('You must be a project member')
- expect(element).to have_no_link('Suggest wiki improvement')
- expect(element).to have_no_link(confluence_link)
- end
- end
+ let(:wiki) { create(:project_wiki, project: project) }
- context 'when user is logged out and issue tracker is public' do
- let(:project) { create(:project, :public, :wiki_repo) }
+ it_behaves_like 'User views empty wiki' do
+ context 'when project is public' do
+ let(:project) { create(:project, :public) }
- it_behaves_like 'empty wiki and accessible issues'
- end
+ it_behaves_like 'empty wiki message', issuable: true
- context 'when user is logged in and not a member' do
- let(:project) { create(:project, :public, :wiki_repo) }
+ context 'when issue tracker is private' do
+ let(:project) { create(:project, :public, :issues_private) }
- before do
- sign_in(user)
- end
+ it_behaves_like 'empty wiki message', issuable: false
+ end
- it_behaves_like 'empty wiki and accessible issues'
- end
+ context 'when issue tracker is disabled' do
+ let(:project) { create(:project, :public, :issues_disabled) }
- context 'when issue tracker is private' do
- let(:project) { create(:project, :public, :wiki_repo, :issues_private) }
+ it_behaves_like 'empty wiki message', issuable: false
+ end
- it_behaves_like 'empty wiki and non-accessible issues'
- end
+ context 'and user is logged in' do
+ before do
+ sign_in(user)
+ end
- context 'when issue tracker is disabled' do
- let(:project) { create(:project, :public, :wiki_repo, :issues_disabled) }
+ context 'and user is not a member' do
+ it_behaves_like 'empty wiki message', issuable: true
+ end
- it_behaves_like 'empty wiki and non-accessible issues'
- end
+ context 'and user is a member' do
+ before do
+ project.add_developer(user)
+ end
- context 'when user is logged in and a member' do
- let(:project) { create(:project, :public) }
-
- before do
- sign_in(user)
- project.add_developer(user)
+ it_behaves_like 'empty wiki message', writable: true, issuable: true
+ end
+ end
end
- it 'shows "create first page" message' do
- visit(project_wikis_path(project))
-
- expect(element).to have_content('your project', count: 2)
+ context 'when project is private' do
+ let(:project) { create(:project, :private) }
- element.click_link 'Create your first page'
+ it_behaves_like 'wiki is not found'
- expect(page).to have_button('Create page')
- end
+ context 'and user is logged in' do
+ before do
+ sign_in(user)
+ end
- it 'does not show the "enable confluence" button' do
- visit(project_wikis_path(project))
+ context 'and user is not a member' do
+ it_behaves_like 'wiki is not found'
+ end
- expect(element).to have_no_link(confluence_link)
- end
- end
+ context 'and user is a member' do
+ before do
+ project.add_developer(user)
+ end
- context 'when user is logged in and an admin' do
- let(:project) { create(:project, :public, :wiki_repo) }
+ it_behaves_like 'empty wiki message', writable: true, issuable: true
+ end
- before do
- sign_in(user)
- project.add_maintainer(user)
- end
-
- it 'shows the "enable confluence" button' do
- visit(project_wikis_path(project))
-
- expect(element).to have_link(confluence_link)
- end
+ context 'and user is a maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
- it 'does not show "enable confluence" button if confluence is already enabled' do
- create(:confluence_service, project: project)
+ it_behaves_like 'empty wiki message', writable: true, issuable: true, confluence: true
- visit(project_wikis_path(project))
+ context 'and Confluence is already enabled' do
+ before do
+ create(:confluence_service, project: project)
+ end
- expect(element).to have_no_link(confluence_link)
+ it_behaves_like 'empty wiki message', writable: true, issuable: true, confluence: false
+ end
+ end
+ end
end
end
end
diff --git a/spec/features/projects/wikis_spec.rb b/spec/features/projects/wikis_spec.rb
new file mode 100644
index 00000000000..1c66ad81145
--- /dev/null
+++ b/spec/features/projects/wikis_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe 'Project wikis' do
+ let_it_be(:user) { create(:user) }
+
+ let(:wiki) { create(:project_wiki, user: user, project: project) }
+ let(:project) { create(:project, namespace: user.namespace, creator: user) }
+
+ it_behaves_like 'User creates wiki page'
+ it_behaves_like 'User deletes wiki page'
+ it_behaves_like 'User previews wiki changes'
+ it_behaves_like 'User updates wiki page'
+ it_behaves_like 'User uses wiki shortcuts'
+ it_behaves_like 'User views AsciiDoc page with includes'
+ it_behaves_like 'User views a wiki page'
+ it_behaves_like 'User views wiki pages'
+ it_behaves_like 'User views wiki sidebar'
+end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 970500985ae..6c1e1eab968 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -99,6 +99,15 @@ RSpec.describe 'Project' do
expect(page).to have_css('.home-panel-description .is-expanded')
end
end
+
+ context 'page description' do
+ before do
+ project.update_attribute(:description, '**Lorem** _ipsum_ dolor sit [amet](https://example.com)')
+ visit path
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
end
describe 'project topics' do
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index f0707610c3f..d8d9ccb5c6f 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -9,6 +9,10 @@ RSpec.describe 'Protected Branches', :js do
let(:admin) { create(:admin) }
let(:project) { create(:project, :repository) }
+ before do
+ stub_feature_flags(deploy_keys_on_protected_branches: false)
+ end
+
context 'logged in as developer' do
before do
project.add_developer(user)
@@ -163,4 +167,14 @@ RSpec.describe 'Protected Branches', :js do
include_examples "protected branches > access control > CE"
end
end
+
+ context 'when the users for protected branches feature is off' do
+ before do
+ stub_licensed_features(protected_refs_for_users: false)
+ end
+
+ include_examples 'when the deploy_keys_on_protected_branches FF is turned on' do
+ let(:all_dropdown_sections) { %w(Roles Deploy\ Keys) }
+ end
+ end
end
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 227e75088d2..a88043c98ac 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe 'User searches for code' do
expect(page).to have_selector('.results', text: 'application.js')
expect(page).to have_selector('.file-content .code')
expect(page).to have_selector("span.line[lang='javascript']")
+ expect(page).to have_link('application.js', href: /master\/files\/js\/application.js/)
end
context 'when on a project page', :js do
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index cfda25b9ab4..5cbfacf4e48 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -30,6 +30,8 @@ RSpec.describe 'User uses header search field', :js do
before do
find('#search')
find('body').native.send_keys('s')
+
+ wait_for_all_requests
end
it 'shows the category search dropdown' do
@@ -89,9 +91,7 @@ RSpec.describe 'User uses header search field', :js do
context 'when entering text into the search field' do
it 'does not display the category search dropdown' do
- page.within('.search-input-wrap') do
- fill_in('search', with: scope_name.first(4))
- end
+ fill_in_search(scope_name.first(4))
expect(page).not_to have_selector('.dropdown-header', text: /#{scope_name}/i)
end
@@ -105,9 +105,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
end
@@ -140,9 +138,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).to have_selector(scoped_search_link('test', group_id: group.id))
@@ -157,9 +153,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).not_to have_selector(scoped_search_link('test', group_id: project.namespace_id))
@@ -182,9 +176,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).to have_selector(scoped_search_link('test', group_id: group.id))
@@ -208,9 +200,7 @@ RSpec.describe 'User uses header search field', :js do
end
it 'displays search options' do
- page.within('.search-input-wrap') do
- fill_in('search', with: 'test')
- end
+ fill_in_search('test')
expect(page).to have_selector(scoped_search_link('test'))
expect(page).to have_selector(scoped_search_link('test', group_id: subgroup.id))
diff --git a/spec/features/static_site_editor_spec.rb b/spec/features/static_site_editor_spec.rb
index b67e47b6ac4..03085917d67 100644
--- a/spec/features/static_site_editor_spec.rb
+++ b/spec/features/static_site_editor_spec.rb
@@ -6,18 +6,71 @@ RSpec.describe 'Static Site Editor' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+ let(:sse_path) { project_show_sse_path(project, 'master/README.md') }
+
+ before_all do
+ project.add_developer(user)
+ end
+
before do
- project.add_maintainer(user)
sign_in(user)
+ end
+
+ context "when no config file is present" do
+ before do
+ visit sse_path
+ end
- visit project_show_sse_path(project, 'master/README.md')
+ it 'renders SSE page with all generated config values and default config file values' do
+ node = page.find('#static-site-editor')
+
+ # assert generated config values are present
+ expect(node['data-base-url']).to eq("/#{project.full_path}/-/sse/master%2FREADME.md")
+ expect(node['data-branch']).to eq('master')
+ expect(node['data-commit-id']).to match(/\A[0-9a-f]{40}\z/)
+ expect(node['data-is-supported-content']).to eq('true')
+ expect(node['data-merge-requests-illustration-path'])
+ .to match(%r{/assets/illustrations/merge_requests-.*\.svg})
+ expect(node['data-namespace']).to eq(project.namespace.full_path)
+ expect(node['data-project']).to eq(project.path)
+ expect(node['data-project-id']).to eq(project.id.to_s)
+
+ # assert default config file values are present
+ expect(node['data-image-upload-path']).to eq('source/images')
+ expect(node['data-mounts']).to eq('[{"source":"source","target":""}]')
+ expect(node['data-static-site-generator']).to eq('middleman')
+ end
end
- it 'renders Static Site Editor page with generated and file attributes' do
- # assert generated config value is present
- expect(page).to have_css('#static-site-editor[data-branch="master"]')
+ context "when a config file is present" do
+ let(:config_file_yml) do
+ <<~YAML
+ image_upload_path: custom-image-upload-path
+ mounts:
+ - source: source1
+ target: ""
+ - source: source2
+ target: target2
+ static_site_generator: middleman
+ YAML
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:blob_data_at).and_return(config_file_yml)
+ end
+
+ visit sse_path
+ end
+
+ it 'renders Static Site Editor page values read from config file' do
+ node = page.find('#static-site-editor')
- # assert file config value is present
- expect(page).to have_css('#static-site-editor[data-static-site-generator="middleman"]')
+ # assert user-specified config file values are present
+ expected_mounts = '[{"source":"source1","target":""},{"source":"source2","target":"target2"}]'
+ expect(node['data-image-upload-path']).to eq('custom-image-upload-path')
+ expect(node['data-mounts']).to eq(expected_mounts)
+ expect(node['data-static-site-generator']).to eq('middleman')
+ end
end
end
diff --git a/spec/features/tags/developer_deletes_tag_spec.rb b/spec/features/tags/developer_deletes_tag_spec.rb
index de9296bc08e..7c4c6f54685 100644
--- a/spec/features/tags/developer_deletes_tag_spec.rb
+++ b/spec/features/tags/developer_deletes_tag_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Developer deletes tag' do
+RSpec.describe 'Developer deletes tag', :js do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:project) { create(:project, :repository, namespace: group) }
@@ -13,11 +13,12 @@ RSpec.describe 'Developer deletes tag' do
visit project_tags_path(project)
end
- context 'from the tags list page', :js do
+ context 'from the tags list page' do
it 'deletes the tag' do
expect(page).to have_content 'v1.1.0'
- delete_tag 'v1.1.0'
+ container = page.find('.content .flex-row', text: 'v1.1.0')
+ delete_tag container
expect(page).not_to have_content 'v1.1.0'
end
@@ -29,15 +30,15 @@ RSpec.describe 'Developer deletes tag' do
expect(current_path).to eq(
project_tag_path(project, 'v1.0.0'))
- click_on 'Delete tag'
+ container = page.find('.nav-controls')
+ delete_tag container
- expect(current_path).to eq(
- project_tags_path(project))
+ expect(current_path).to eq("#{project_tags_path(project)}/")
expect(page).not_to have_content 'v1.0.0'
end
end
- context 'when pre-receive hook fails', :js do
+ context 'when pre-receive hook fails' do
before do
allow_next_instance_of(Gitlab::GitalyClient::OperationService) do |instance|
allow(instance).to receive(:rm_tag)
@@ -46,15 +47,17 @@ RSpec.describe 'Developer deletes tag' do
end
it 'shows the error message' do
- delete_tag 'v1.1.0'
+ container = page.find('.content .flex-row', text: 'v1.1.0')
+ delete_tag container
expect(page).to have_content('Do not delete tags')
end
end
- def delete_tag(tag)
- page.within('.content') do
- accept_confirm { find("li > .row-fixed-content.controls a.btn-remove[href='/#{project.full_path}/-/tags/#{tag}']").click }
- end
+ def delete_tag(container)
+ container.find('.js-remove-tag').click
+
+ page.within('.modal') { click_button('Delete tag') }
+ wait_for_requests
end
end
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index a9cfe794177..0f8daaf8e15 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Task Lists' do
+RSpec.describe 'Task Lists', :js do
include Warden::Test::Helpers
let_it_be(:project) { create(:project, :public, :repository) }
@@ -38,41 +38,7 @@ RSpec.describe 'Task Lists' do
MARKDOWN
end
- let(:nested_tasks_markdown) do
- <<-EOT.strip_heredoc
- - [ ] Task a
- - [x] Task a.1
- - [ ] Task a.2
- - [ ] Task b
-
- 1. [ ] Task 1
- 1. [ ] Task 1.1
- 1. [x] Task 1.2
- EOT
- end
-
- let(:commented_tasks_markdown) do
- <<-EOT.strip_heredoc
- <!--
- - [ ] a
- -->
-
- - [ ] b
- EOT
- end
-
- let(:summary_no_blank_line_markdown) do
- <<-EOT.strip_heredoc
- <details>
- <summary>No blank line after summary element breaks task list</summary>
- 1. [ ] People Ops: do such and such
- </details>
-
- * [ ] Task 1
- EOT
- end
-
- before(:all) do
+ before_all do
project.add_maintainer(user)
project.add_guest(user2)
end
@@ -86,7 +52,7 @@ RSpec.describe 'Task Lists' do
end
describe 'for Issues' do
- describe 'multiple tasks', :js do
+ describe 'multiple tasks' do
let!(:issue) { create(:issue, description: markdown, author: user, project: project) }
it 'renders' do
@@ -127,7 +93,7 @@ RSpec.describe 'Task Lists' do
end
end
- describe 'single incomplete task', :js do
+ describe 'single incomplete task' do
let!(:issue) { create(:issue, description: singleIncompleteMarkdown, author: user, project: project) }
it 'renders' do
@@ -146,7 +112,7 @@ RSpec.describe 'Task Lists' do
end
end
- describe 'single complete task', :js do
+ describe 'single complete task' do
let!(:issue) { create(:issue, description: singleCompleteMarkdown, author: user, project: project) }
it 'renders' do
@@ -175,7 +141,7 @@ RSpec.describe 'Task Lists' do
project: project, author: user)
end
- it 'renders for note body', :js do
+ it 'renders for note body' do
visit_issue(project, issue)
expect(page).to have_selector('.note ul.task-list', count: 1)
@@ -183,14 +149,14 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('.note ul input[checked]', count: 2)
end
- it 'contains the required selectors', :js do
+ it 'contains the required selectors' do
visit_issue(project, issue)
expect(page).to have_selector('.note .js-task-list-container')
expect(page).to have_selector('.note .js-task-list-container .task-list .task-list-item .task-list-item-checkbox')
end
- it 'is only editable by author', :js do
+ it 'is only editable by author' do
visit_issue(project, issue)
expect(page).to have_selector('.js-task-list-container')
@@ -209,7 +175,7 @@ RSpec.describe 'Task Lists' do
project: project, author: user)
end
- it 'renders for note body', :js do
+ it 'renders for note body' do
visit_issue(project, issue)
expect(page).to have_selector('.note ul.task-list', count: 1)
@@ -224,7 +190,7 @@ RSpec.describe 'Task Lists' do
project: project, author: user)
end
- it 'renders for note body', :js do
+ it 'renders for note body' do
visit_issue(project, issue)
expect(page).to have_selector('.note ul.task-list', count: 1)
@@ -240,7 +206,7 @@ RSpec.describe 'Task Lists' do
end
shared_examples 'multiple tasks' do
- it 'renders for description', :js do
+ it 'renders for description' do
visit_merge_request(project, merge)
wait_for_requests
@@ -249,7 +215,7 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('ul input[checked]', count: 2)
end
- it 'contains the required selectors', :js do
+ it 'contains the required selectors' do
visit_merge_request(project, merge)
wait_for_requests
@@ -261,7 +227,7 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('form.js-issuable-update')
end
- it 'is only editable by author', :js do
+ it 'is only editable by author' do
visit_merge_request(project, merge)
wait_for_requests
@@ -300,7 +266,7 @@ RSpec.describe 'Task Lists' do
describe 'single incomplete task' do
let!(:merge) { create(:merge_request, :simple, description: singleIncompleteMarkdown, author: user, source_project: project) }
- it 'renders for description', :js do
+ it 'renders for description' do
visit_merge_request(project, merge)
wait_for_requests
@@ -319,7 +285,7 @@ RSpec.describe 'Task Lists' do
describe 'single complete task' do
let!(:merge) { create(:merge_request, :simple, description: singleCompleteMarkdown, author: user, source_project: project) }
- it 'renders for description', :js do
+ it 'renders for description' do
visit_merge_request(project, merge)
wait_for_requests
@@ -337,7 +303,17 @@ RSpec.describe 'Task Lists' do
end
describe 'markdown task edge cases' do
- describe 'commented tasks', :js do
+ describe 'commented tasks' do
+ let(:commented_tasks_markdown) do
+ <<-EOT.strip_heredoc
+ <!--
+ - [ ] a
+ -->
+
+ - [ ] b
+ EOT
+ end
+
let!(:issue) { create(:issue, description: commented_tasks_markdown, author: user, project: project) }
it 'renders' do
@@ -360,7 +336,18 @@ RSpec.describe 'Task Lists' do
end
end
- describe 'summary with no blank line', :js do
+ describe 'summary with no blank line' do
+ let(:summary_no_blank_line_markdown) do
+ <<-EOT.strip_heredoc
+ <details>
+ <summary>No blank line after summary element breaks task list</summary>
+ 1. [ ] People Ops: do such and such
+ </details>
+
+ * [ ] Task 1
+ EOT
+ end
+
let!(:issue) { create(:issue, description: summary_no_blank_line_markdown, author: user, project: project) }
it 'renders' do
@@ -382,5 +369,31 @@ RSpec.describe 'Task Lists' do
expect(page).to have_selector('ul input[checked]', count: 1)
end
end
+
+ describe 'markdown starting with new line character' do
+ let(:markdown_starting_with_new_line) do
+ <<-EOT.strip_heredoc
+
+ - [ ] Task 1
+ EOT
+ end
+
+ let(:merge_request) { create(:merge_request, description: markdown_starting_with_new_line, author: user, source_project: project) }
+
+ it 'allows the task to be checked' do
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ expect(page).to have_selector('ul input[checked]', count: 0)
+
+ find('.task-list-item-checkbox').click
+ wait_for_requests
+
+ visit project_merge_request_path(project, merge_request)
+ wait_for_requests
+
+ expect(page).to have_selector('ul input[checked]', count: 1)
+ end
+ end
end
end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 4be27673adf..6fa805d8c74 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -19,114 +19,132 @@ RSpec.describe 'Triggers', :js do
visit project_settings_ci_cd_path(@project)
end
- describe 'create trigger workflow' do
- it 'prevents adding new trigger with no description' do
- fill_in 'trigger_description', with: ''
- click_button 'Add trigger'
-
- # See if input has error due to empty value
- expect(page.find('form.gl-show-field-errors .gl-field-error')).to be_visible
- end
+ shared_examples 'triggers page' do
+ describe 'create trigger workflow' do
+ it 'prevents adding new trigger with no description' do
+ fill_in 'trigger_description', with: ''
+ click_button 'Add trigger'
+
+ # See if input has error due to empty value
+ expect(page.find('form.gl-show-field-errors .gl-field-error')).to be_visible
+ end
- it 'adds new trigger with description' do
- fill_in 'trigger_description', with: 'trigger desc'
- click_button 'Add trigger'
+ it 'adds new trigger with description' do
+ fill_in 'trigger_description', with: 'trigger desc'
+ click_button 'Add trigger'
- # See if "trigger creation successful" message displayed and description and owner are correct
- expect(page.find('.flash-notice')).to have_content 'Trigger was created successfully.'
- expect(page.find('.triggers-list')).to have_content 'trigger desc'
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ aggregate_failures 'display creation notice and trigger is created' do
+ expect(page.find('.flash-notice')).to have_content 'Trigger was created successfully.'
+ expect(page.find('.triggers-list')).to have_content 'trigger desc'
+ expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ end
+ end
end
- end
-
- describe 'edit trigger workflow' do
- let(:new_trigger_title) { 'new trigger' }
- it 'click on edit trigger opens edit trigger page' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ describe 'edit trigger workflow' do
+ let(:new_trigger_title) { 'new trigger' }
- # See if edit page has correct descrption
- find('a[title="Edit"]').send_keys(:return)
- expect(page.find('#trigger_description').value).to have_content 'trigger desc'
- end
+ it 'click on edit trigger opens edit trigger page' do
+ create(:ci_trigger, owner: user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
- it 'edit trigger and save' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+        # See if edit page has correct description
+ find('a[title="Edit"]').send_keys(:return)
+ expect(page.find('#trigger_description').value).to have_content 'trigger desc'
+ end
- # See if edit page opens, then fill in new description and save
- find('a[title="Edit"]').send_keys(:return)
- fill_in 'trigger_description', with: new_trigger_title
- click_button 'Save trigger'
+ it 'edit trigger and save' do
+ create(:ci_trigger, owner: user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
- # See if "trigger updated successfully" message displayed and description and owner are correct
- expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
- expect(page.find('.triggers-list')).to have_content new_trigger_title
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
- end
- end
+ # See if edit page opens, then fill in new description and save
+ find('a[title="Edit"]').send_keys(:return)
+ fill_in 'trigger_description', with: new_trigger_title
+ click_button 'Save trigger'
- describe 'trigger "Revoke" workflow' do
- before do
- create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ aggregate_failures 'display update notice and trigger is updated' do
+ expect(page.find('.flash-notice')).to have_content 'Trigger was successfully updated.'
+ expect(page.find('.triggers-list')).to have_content new_trigger_title
+ expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ end
+ end
end
- it 'button "Revoke" has correct alert' do
- expected_alert = 'By revoking a trigger you will break any processes making use of it. Are you sure?'
- expect(page.find('a.btn-trigger-revoke')['data-confirm']).to eq expected_alert
- end
+ describe 'trigger "Revoke" workflow' do
+ before do
+ create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
+ end
- it 'revoke trigger' do
- # See if "Revoke" on trigger works post trigger creation
- page.accept_confirm do
- find('a.btn-trigger-revoke').send_keys(:return)
+ it 'button "Revoke" has correct alert' do
+ expected_alert = 'By revoking a trigger you will break any processes making use of it. Are you sure?'
+ expect(page.find('[data-testid="trigger_revoke_button"]')['data-confirm']).to eq expected_alert
end
- expect(page.find('.flash-notice')).to have_content 'Trigger removed'
- expect(page).to have_selector('p.settings-message.text-center.gl-mb-3')
- end
- end
+ it 'revoke trigger' do
+ # See if "Revoke" on trigger works post trigger creation
+ page.accept_confirm do
+ find('[data-testid="trigger_revoke_button"]').send_keys(:return)
+ end
- describe 'show triggers workflow' do
- it 'contains trigger description placeholder' do
- expect(page.find('#trigger_description')['placeholder']).to eq 'Trigger description'
+ aggregate_failures 'trigger is removed' do
+ expect(page.find('.flash-notice')).to have_content 'Trigger removed'
+ expect(page).to have_css('[data-testid="no_triggers_content"]')
+ end
+ end
end
- it 'show "invalid" badge for trigger with owner having insufficient permissions' do
- create(:ci_trigger, owner: guest_user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+ describe 'show triggers workflow' do
+ it 'contains trigger description placeholder' do
+ expect(page.find('#trigger_description')['placeholder']).to eq 'Trigger description'
+ end
- expect(page.find('.triggers-list')).to have_content 'invalid'
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
- end
+      it 'shows "invalid" badge for trigger with owner having insufficient permissions' do
+ create(:ci_trigger, owner: guest_user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
+
+ aggregate_failures 'has invalid badge and no edit link' do
+ expect(page.find('.triggers-list')).to have_content 'invalid'
+ expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
+ end
+ end
- it 'do not show "Edit" or full token for not owned trigger' do
- # Create trigger with user different from current_user
- create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
+      it 'does not show "Edit" or full token for not owned trigger' do
+ # Create trigger with user different from current_user
+ create(:ci_trigger, owner: user2, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
+
+ aggregate_failures 'shows truncated token, no clipboard button and no edit link' do
+ expect(page.find('.triggers-list')).to have_content(@project.triggers.first.token[0..3])
+ expect(page.find('.triggers-list')).not_to have_selector('[data-testid="clipboard-btn"]')
+ expect(page.find('.triggers-list .trigger-owner')).not_to have_content user.name
+ expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
+ end
+ end
- # See if trigger not owned by current_user shows only first few token chars and doesn't have copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content(@project.triggers.first.token[0..3])
- expect(page.find('.triggers-list')).not_to have_selector('button.btn-clipboard')
+      it 'shows "Edit" and full token for owned trigger' do
+ create(:ci_trigger, owner: user, project: @project, description: trigger_title)
+ visit project_settings_ci_cd_path(@project)
- # See if trigger owner name doesn't match with current_user and trigger is non-editable
- expect(page.find('.triggers-list .trigger-owner')).not_to have_content user.name
- expect(page.find('.triggers-list')).not_to have_selector('a[title="Edit"]')
+ aggregate_failures 'shows full token, clipboard button and edit link' do
+ expect(page.find('.triggers-list')).to have_content @project.triggers.first.token
+ expect(page.find('.triggers-list')).to have_selector('[data-testid="clipboard-btn"]')
+ expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
+ expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
+ end
+ end
end
+ end
- it 'show "Edit" and full token for owned trigger' do
- create(:ci_trigger, owner: user, project: @project, description: trigger_title)
- visit project_settings_ci_cd_path(@project)
-
- # See if trigger shows full token and has copy-to-clipboard button
- expect(page.find('.triggers-list')).to have_content @project.triggers.first.token
- expect(page.find('.triggers-list')).to have_selector('button.btn-clipboard')
+ context 'when ci_pipeline_triggers_settings_vue_ui is enabled' do
+ it_behaves_like 'triggers page'
+ end
- # See if trigger owner name matches with current_user and is editable
- expect(page.find('.triggers-list .trigger-owner')).to have_content user.name
- expect(page.find('.triggers-list')).to have_selector('a[title="Edit"]')
+ context 'when ci_pipeline_triggers_settings_vue_ui is disabled' do
+ before do
+ stub_feature_flags(ci_pipeline_triggers_settings_vue_ui: false)
end
+
+ it_behaves_like 'triggers page'
end
end
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index dd5c2442d00..b3c8cf8d326 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'User page' do
include ExternalAuthorizationServiceHelpers
- let(:user) { create(:user) }
+ let(:user) { create(:user, bio: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
context 'with public profile' do
it 'shows all the tabs' do
@@ -174,4 +174,12 @@ RSpec.describe 'User page' do
end
end
end
+
+ context 'page description' do
+ before do
+ visit(user_path(user))
+ end
+
+ it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
+ end
end
diff --git a/spec/features/users/terms_spec.rb b/spec/features/users/terms_spec.rb
index 5275845fe5b..7500f2fe59a 100644
--- a/spec/features/users/terms_spec.rb
+++ b/spec/features/users/terms_spec.rb
@@ -26,6 +26,21 @@ RSpec.describe 'Users > Terms' do
expect(page).not_to have_content('Continue')
end
+ context 'when user is a project bot' do
+ let(:project_bot) { create(:user, :project_bot) }
+
+ before do
+ enforce_terms
+ end
+
+ it 'auto accepts the terms' do
+ visit terms_path
+
+ expect(page).not_to have_content('Accept terms')
+ expect(project_bot.terms_accepted?).to be(true)
+ end
+ end
+
context 'when signed in' do
let(:user) { create(:user) }
diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
index 196fde5efe0..65f6dc0ba74 100644
--- a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
+++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
end
context 'with unsaved merge request' do
- let(:merge_request) { build(:merge_request) }
+ let(:merge_request) { build(:merge_request, source_project: create(:project, :repository)) }
let!(:pipeline) do
create(:ci_empty_pipeline, project: project,
diff --git a/spec/finders/group_labels_finder_spec.rb b/spec/finders/group_labels_finder_spec.rb
deleted file mode 100644
index d65a8fb4fed..00000000000
--- a/spec/finders/group_labels_finder_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GroupLabelsFinder, '#execute' do
- let!(:group) { create(:group) }
- let!(:user) { create(:user) }
- let!(:label1) { create(:group_label, title: 'Foo', description: 'Lorem ipsum', group: group) }
- let!(:label2) { create(:group_label, title: 'Bar', description: 'Fusce consequat', group: group) }
-
- it 'returns all group labels sorted by name if no params' do
- result = described_class.new(user, group).execute
-
- expect(result.to_a).to match_array([label2, label1])
- end
-
- it 'returns all group labels sorted by name desc' do
- result = described_class.new(user, group, sort: 'name_desc').execute
-
- expect(result.to_a).to match_array([label2, label1])
- end
-
- it 'returns group labels that match search' do
- result = described_class.new(user, group, search: 'Foo').execute
-
- expect(result.to_a).to match_array([label1])
- end
-
- it 'returns group labels user subscribed to' do
- label2.subscribe(user)
-
- result = described_class.new(user, group, subscribed: 'true').execute
-
- expect(result.to_a).to match_array([label2])
- end
-
- it 'returns second page of labels' do
- result = described_class.new(user, group, page: '2').execute
-
- expect(result.to_a).to match_array([])
- end
-end
diff --git a/spec/finders/groups_finder_spec.rb b/spec/finders/groups_finder_spec.rb
index 48e4c5dadc9..c9e9328794e 100644
--- a/spec/finders/groups_finder_spec.rb
+++ b/spec/finders/groups_finder_spec.rb
@@ -161,5 +161,61 @@ RSpec.describe GroupsFinder do
end
end
end
+
+ context 'with include parent group descendants' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:public_subgroup) { create(:group, :public, parent: parent_group) }
+ let_it_be(:internal_sub_subgroup) { create(:group, :internal, parent: public_subgroup) }
+ let_it_be(:private_sub_subgroup) { create(:group, :private, parent: public_subgroup) }
+ let_it_be(:public_sub_subgroup) { create(:group, :public, parent: public_subgroup) }
+ let(:params) { { include_parent_descendants: true, parent: parent_group } }
+
+ context 'with nil parent' do
+ it 'returns all accessible groups' do
+ params[:parent] = nil
+ expect(described_class.new(user, params).execute).to contain_exactly(
+ parent_group,
+ public_subgroup,
+ internal_sub_subgroup,
+ public_sub_subgroup
+ )
+ end
+ end
+
+ context 'without a user' do
+ it 'only returns the group public descendants' do
+ expect(described_class.new(nil, params).execute).to contain_exactly(
+ public_subgroup,
+ public_sub_subgroup
+ )
+ end
+ end
+
+ context 'when a user is present' do
+ it 'returns the group public and internal descendants' do
+ expect(described_class.new(user, params).execute).to contain_exactly(
+ public_subgroup,
+ public_sub_subgroup,
+ internal_sub_subgroup
+ )
+ end
+ end
+
+ context 'when a parent group member is present' do
+ before do
+ parent_group.add_developer(user)
+ end
+
+ it 'returns all group descendants' do
+ expect(described_class.new(user, params).execute).to contain_exactly(
+ public_subgroup,
+ public_sub_subgroup,
+ internal_sub_subgroup,
+ private_sub_subgroup
+ )
+ end
+ end
+ end
end
end
diff --git a/spec/finders/merge_requests/by_approvals_finder_spec.rb b/spec/finders/merge_requests/by_approvals_finder_spec.rb
new file mode 100644
index 00000000000..0e1856879f1
--- /dev/null
+++ b/spec/finders/merge_requests/by_approvals_finder_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ByApprovalsFinder do
+ let_it_be(:first_user) { create(:user) }
+ let_it_be(:second_user) { create(:user) }
+ let(:third_user) { create(:user) }
+
+ let_it_be(:merge_request_without_approvals) { create(:merge_request) }
+ let_it_be(:merge_request_with_first_user_approval) do
+ create(:merge_request).tap do |mr|
+ create(:approval, merge_request: mr, user: first_user)
+ end
+ end
+ let_it_be(:merge_request_with_both_approvals) do
+ create(:merge_request).tap do |mr|
+ create(:approval, merge_request: mr, user: first_user)
+ create(:approval, merge_request: mr, user: second_user)
+ end
+ end
+
+ def merge_requests(ids: nil, names: [])
+ described_class.new(names, ids).execute(MergeRequest.all)
+ end
+
+ context 'filter by no approvals' do
+ it 'returns merge requests without approvals' do
+ expected_result = [merge_request_without_approvals]
+
+ expect(merge_requests(ids: 'None')).to match_array(expected_result)
+ expect(merge_requests(names: ['None'])).to match_array(expected_result)
+ end
+ end
+
+ context 'filter by any approvals' do
+ it 'returns merge requests approved by at least one user' do
+ expected_result = [merge_request_with_first_user_approval, merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: 'Any')).to match_array(expected_result)
+ expect(merge_requests(names: ['Any'])).to match_array(expected_result)
+ end
+ end
+
+ context 'filter by specific user approval' do
+ it 'returns merge requests approved by specific user' do
+ expected_result = [merge_request_with_first_user_approval, merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [first_user.id])).to match_array(expected_result)
+ expect(merge_requests(names: [first_user.username])).to match_array(expected_result)
+ end
+ end
+
+ context 'filter by multiple user approval' do
+ it 'returns merge requests approved by both users' do
+ expected_result = [merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [first_user.id, second_user.id])).to match_array(expected_result)
+ expect(merge_requests(names: [first_user.username, second_user.username])).to match_array(expected_result)
+ end
+
+ context 'limiting max conditional elements' do
+ it 'returns merge requests approved by both users, considering limit of 2 being defined' do
+ stub_const('MergeRequests::ByApprovalsFinder::MAX_FILTER_ELEMENTS', 2)
+
+ expected_result = [merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [first_user.id, second_user.id, third_user.id])).to match_array(expected_result)
+ expect(merge_requests(names: [first_user.username, second_user.username, third_user.username])).to match_array(expected_result)
+ end
+ end
+ end
+
+ context 'with empty params' do
+ it 'returns all merge requests' do
+ expected_result = [merge_request_without_approvals, merge_request_with_first_user_approval, merge_request_with_both_approvals]
+
+ expect(merge_requests(ids: [])).to match_array(expected_result)
+ expect(merge_requests(names: [])).to match_array(expected_result)
+ end
+ end
+end
diff --git a/spec/finders/packages/generic/package_finder_spec.rb b/spec/finders/packages/generic/package_finder_spec.rb
new file mode 100644
index 00000000000..ed34268e7a9
--- /dev/null
+++ b/spec/finders/packages/generic/package_finder_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Packages::Generic::PackageFinder do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:generic_package, project: project) }
+
+ describe '#execute!' do
+ subject(:finder) { described_class.new(project) }
+
+ it 'finds package by name and version' do
+ found_package = finder.execute!(package.name, package.version)
+
+ expect(found_package).to eq(package)
+ end
+
+ it 'ignores packages with same name but different version' do
+ create(:generic_package, project: project, name: package.name, version: '3.1.4')
+
+ found_package = finder.execute!(package.name, package.version)
+
+ expect(found_package).to eq(package)
+ end
+
+ it 'raises ActiveRecord::RecordNotFound if package is not found' do
+ expect { finder.execute!(package.name, '3.1.4') }
+ .to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+end
diff --git a/spec/fixtures/api/schemas/entities/group_group_link.json b/spec/fixtures/api/schemas/entities/group_group_link.json
index 4c9aae140d2..dda94dc3362 100644
--- a/spec/fixtures/api/schemas/entities/group_group_link.json
+++ b/spec/fixtures/api/schemas/entities/group_group_link.json
@@ -1,10 +1,12 @@
{
"type": "object",
- "required": ["id", "created_at", "expires_at", "access_level"],
+ "required": ["id", "created_at", "expires_at", "can_update", "can_remove", "access_level"],
"properties": {
"id": { "type": "integer" },
"created_at": { "type": "date-time" },
"expires_at": { "type": ["date-time", "null"] },
+ "can_update": { "type": "boolean" },
+ "can_remove": { "type": "boolean" },
"access_level": {
"type": "object",
"required": ["integer_value", "string_value"],
diff --git a/spec/fixtures/api/schemas/entities/merge_request_basic.json b/spec/fixtures/api/schemas/entities/merge_request_basic.json
index 3c19528d71b..b061176f6a7 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_basic.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_basic.json
@@ -1,6 +1,7 @@
{
"type": "object",
"properties" : {
+ "title": { "type": "string" },
"state": { "type": "string" },
"merge_status": { "type": "string" },
"source_branch_exists": { "type": "boolean" },
diff --git a/spec/fixtures/api/schemas/entities/test_case.json b/spec/fixtures/api/schemas/entities/test_case.json
index 0dd3c5d472f..d731d7eed0a 100644
--- a/spec/fixtures/api/schemas/entities/test_case.json
+++ b/spec/fixtures/api/schemas/entities/test_case.json
@@ -8,6 +8,7 @@
"status": { "type": "string" },
"name": { "type": "string" },
"classname": { "type": "string" },
+ "file": { "type": ["string", "null"] },
"execution_time": { "type": "float" },
"system_output": { "type": ["string", "null"] },
"stack_trace": { "type": ["string", "null"] },
diff --git a/spec/fixtures/api/schemas/entities/trigger.json b/spec/fixtures/api/schemas/entities/trigger.json
new file mode 100644
index 00000000000..5c46142673f
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/trigger.json
@@ -0,0 +1,39 @@
+{
+ "type": "object",
+ "required": [
+ "description",
+ "owner",
+ "last_used",
+ "has_token_exposed",
+ "token",
+ "can_access_project"
+ ],
+ "properties": {
+ "description": {
+ "type": ["string", "null"]
+ },
+ "owner": {
+ "type": "object",
+ "$ref": "user.json"
+ },
+ "last_used": {
+ "type": ["datetime", "null"]
+ },
+ "token": {
+ "type": "string"
+ },
+ "has_token_exposed": {
+ "type": "boolean"
+ },
+ "can_access_project": {
+ "type": "boolean"
+ },
+ "edit_project_trigger_path": {
+ "type": "string"
+ },
+ "project_trigger_path": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/feature_flags_client_token.json b/spec/fixtures/api/schemas/feature_flags_client_token.json
new file mode 100644
index 00000000000..115db422d12
--- /dev/null
+++ b/spec/fixtures/api/schemas/feature_flags_client_token.json
@@ -0,0 +1,10 @@
+{
+ "type": "object",
+ "required" : [
+ "token"
+ ],
+ "properties" : {
+ "token": { "type": ["string"] }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/group_member.json b/spec/fixtures/api/schemas/group_member.json
index 035c862d229..eefd4aeb219 100644
--- a/spec/fixtures/api/schemas/group_member.json
+++ b/spec/fixtures/api/schemas/group_member.json
@@ -62,7 +62,18 @@
"avatar_url": { "type": ["string", "null"] },
"web_url": { "type": "string" },
"blocked": { "type": "boolean" },
- "two_factor_enabled": { "type": "boolean" }
+ "two_factor_enabled": { "type": "boolean" },
+ "status": {
+ "type": "object",
+ "required": [
+ "emoji",
+ "message_html"
+ ],
+ "properties": {
+ "emoji": { "type": "string" },
+ "message_html": { "type": "string" }
+ }
+ }
}
},
"invite": {
diff --git a/spec/fixtures/api/schemas/public_api/v4/packages/package.json b/spec/fixtures/api/schemas/public_api/v4/packages/package.json
index 757e5fd26b6..08909efd10c 100644
--- a/spec/fixtures/api/schemas/public_api/v4/packages/package.json
+++ b/spec/fixtures/api/schemas/public_api/v4/packages/package.json
@@ -11,6 +11,9 @@
"name": {
"type": "string"
},
+ "conan_package_name": {
+ "type": "string"
+ },
"version": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/registry/repository.json b/spec/fixtures/api/schemas/registry/repository.json
index 1f84e787b19..18d2c68ac2f 100644
--- a/spec/fixtures/api/schemas/registry/repository.json
+++ b/spec/fixtures/api/schemas/registry/repository.json
@@ -20,6 +20,9 @@
"created_at": {
"type": "date-time"
},
+ "cleanup_policy_started_at": {
+ "type": "date-time"
+ },
"tags_path": {
"type": "string"
},
diff --git a/spec/fixtures/api/schemas/unleash/unleash.json b/spec/fixtures/api/schemas/unleash/unleash.json
new file mode 100644
index 00000000000..6eaf316bb11
--- /dev/null
+++ b/spec/fixtures/api/schemas/unleash/unleash.json
@@ -0,0 +1,20 @@
+{
+ "additionalProperties": false,
+ "properties": {
+ "features": {
+ "items": {
+ "$ref": "unleash_feature.json"
+ },
+ "minItems": 0,
+ "type": "array"
+ },
+ "version": {
+ "type": "integer"
+ }
+ },
+ "required": [
+ "version",
+ "features"
+ ],
+ "type": "object"
+}
diff --git a/spec/fixtures/api/schemas/unleash/unleash_feature.json b/spec/fixtures/api/schemas/unleash/unleash_feature.json
new file mode 100644
index 00000000000..71d375a5371
--- /dev/null
+++ b/spec/fixtures/api/schemas/unleash/unleash_feature.json
@@ -0,0 +1,27 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name",
+ "enabled",
+ "strategies"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "enabled": {
+ "type": "boolean"
+ },
+ "description": {
+ "type": "string"
+ },
+ "strategies": {
+ "items": {
+ "$ref": "unleash_strategy.json"
+ },
+ "minItems": 1,
+ "type": "array"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/unleash/unleash_strategy.json b/spec/fixtures/api/schemas/unleash/unleash_strategy.json
new file mode 100644
index 00000000000..7b48038ad15
--- /dev/null
+++ b/spec/fixtures/api/schemas/unleash/unleash_strategy.json
@@ -0,0 +1,24 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "name"
+ ],
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "parameters": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "groupId": {
+ "type": "string"
+ },
+ "percentage": {
+ "type": "integer"
+ }
+ }
+ }
+ }
+}
diff --git a/spec/fixtures/invalid_manifest.xml b/spec/fixtures/invalid_manifest.xml
new file mode 100644
index 00000000000..5357329784c
--- /dev/null
+++ b/spec/fixtures/invalid_manifest.xml
@@ -0,0 +1,4 @@
+<manifest>
+ <remote review="invalid-url" />
+ <project name="platform/build"/>
+</manifest>
diff --git a/spec/fixtures/lib/backup/design_repo.bundle b/spec/fixtures/lib/backup/design_repo.bundle
new file mode 100644
index 00000000000..3ed4ad6ab8b
--- /dev/null
+++ b/spec/fixtures/lib/backup/design_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/backup/project_repo.bundle b/spec/fixtures/lib/backup/project_repo.bundle
new file mode 100644
index 00000000000..44d4fc56d51
--- /dev/null
+++ b/spec/fixtures/lib/backup/project_repo.bundle
Binary files differ
diff --git a/spec/fixtures/lib/backup/wiki_repo.bundle b/spec/fixtures/lib/backup/wiki_repo.bundle
new file mode 100644
index 00000000000..bcc08dcbe8e
--- /dev/null
+++ b/spec/fixtures/lib/backup/wiki_repo.bundle
Binary files differ
diff --git a/spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb b/spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb
new file mode 100644
index 00000000000..c6cac69265a
--- /dev/null
+++ b/spec/fixtures/packages/debian/libsample0_1.2.3~alpha2-1_amd64.deb
@@ -0,0 +1 @@
+empty
diff --git a/spec/fixtures/packages/generic/myfile.tar.gz b/spec/fixtures/packages/generic/myfile.tar.gz
new file mode 100644
index 00000000000..c71b1fef23d
--- /dev/null
+++ b/spec/fixtures/packages/generic/myfile.tar.gz
Binary files differ
diff --git a/spec/frontend/alert_handler_spec.js b/spec/frontend/alert_handler_spec.js
index ba2f4f24aa5..0cee28112a8 100644
--- a/spec/frontend/alert_handler_spec.js
+++ b/spec/frontend/alert_handler_spec.js
@@ -2,18 +2,26 @@ import { setHTMLFixture } from 'helpers/fixtures';
import initAlertHandler from '~/alert_handler';
describe('Alert Handler', () => {
- const ALERT_SELECTOR = 'gl-alert';
- const CLOSE_SELECTOR = 'gl-alert-dismiss';
- const ALERT_HTML = `<div class="${ALERT_SELECTOR}"><button class="${CLOSE_SELECTOR}">Dismiss</button></div>`;
+ const ALERT_CLASS = 'gl-alert';
+ const BANNER_CLASS = 'gl-banner';
+ const DISMISS_CLASS = 'gl-alert-dismiss';
+ const DISMISS_LABEL = 'Dismiss';
- const findFirstAlert = () => document.querySelector(`.${ALERT_SELECTOR}`);
- const findAllAlerts = () => document.querySelectorAll(`.${ALERT_SELECTOR}`);
- const findFirstCloseButton = () => document.querySelector(`.${CLOSE_SELECTOR}`);
+ const generateHtml = parentClass =>
+ `<div class="${parentClass}">
+ <button aria-label="${DISMISS_LABEL}">Dismiss</button>
+ </div>`;
+
+ const findFirstAlert = () => document.querySelector(`.${ALERT_CLASS}`);
+ const findFirstBanner = () => document.querySelector(`.${BANNER_CLASS}`);
+ const findAllAlerts = () => document.querySelectorAll(`.${ALERT_CLASS}`);
+ const findFirstDismissButton = () => document.querySelector(`[aria-label="${DISMISS_LABEL}"]`);
+ const findFirstDismissButtonByClass = () => document.querySelector(`.${DISMISS_CLASS}`);
describe('initAlertHandler', () => {
describe('with one alert', () => {
beforeEach(() => {
- setHTMLFixture(ALERT_HTML);
+ setHTMLFixture(generateHtml(ALERT_CLASS));
initAlertHandler();
});
@@ -22,14 +30,14 @@ describe('Alert Handler', () => {
});
it('should dismiss the alert on click', () => {
- findFirstCloseButton().click();
+ findFirstDismissButton().click();
expect(findFirstAlert()).not.toExist();
});
});
describe('with two alerts', () => {
beforeEach(() => {
- setHTMLFixture(ALERT_HTML + ALERT_HTML);
+ setHTMLFixture(generateHtml(ALERT_CLASS) + generateHtml(ALERT_CLASS));
initAlertHandler();
});
@@ -38,9 +46,46 @@ describe('Alert Handler', () => {
});
it('should dismiss only one alert on click', () => {
- findFirstCloseButton().click();
+ findFirstDismissButton().click();
expect(findAllAlerts()).toHaveLength(1);
});
});
+
+ describe('with a dismissible banner', () => {
+ beforeEach(() => {
+ setHTMLFixture(generateHtml(BANNER_CLASS));
+ initAlertHandler();
+ });
+
+ it('should render the banner', () => {
+ expect(findFirstBanner()).toExist();
+ });
+
+ it('should dismiss the banner on click', () => {
+ findFirstDismissButton().click();
+ expect(findFirstBanner()).not.toExist();
+ });
+ });
+
+ // Dismiss buttons *should* have the correct aria labels, but some of them won't
+ // because legacy code isn't always a11y compliant.
+ // This tests that the fallback for the incorrectly labelled buttons works.
+ describe('with a mislabelled dismiss button', () => {
+ beforeEach(() => {
+ setHTMLFixture(`<div class="${ALERT_CLASS}">
+ <button class="${DISMISS_CLASS}">Dismiss</button>
+ </div>`);
+ initAlertHandler();
+ });
+
+    it('should render the alert', () => {
+ expect(findFirstAlert()).toExist();
+ });
+
+    it('should dismiss the alert on click', () => {
+ findFirstDismissButtonByClass().click();
+ expect(findFirstAlert()).not.toExist();
+ });
+ });
});
});
diff --git a/spec/frontend/alert_management/components/alert_details_spec.js b/spec/frontend/alert_management/components/alert_details_spec.js
index 8aa26dbca3b..910bb31b573 100644
--- a/spec/frontend/alert_management/components/alert_details_spec.js
+++ b/spec/frontend/alert_management/components/alert_details_spec.js
@@ -2,8 +2,10 @@ import { mount, shallowMount } from '@vue/test-utils';
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
import AlertDetails from '~/alert_management/components/alert_details.vue';
+import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
import createIssueMutation from '~/alert_management/graphql/mutations/create_issue_from_alert.mutation.graphql';
import { joinPaths } from '~/lib/utils/url_utility';
import {
@@ -24,31 +26,36 @@ describe('AlertDetails', () => {
const $router = { replace: jest.fn() };
function mountComponent({ data, loading = false, mountMethod = shallowMount, stubs = {} } = {}) {
- wrapper = mountMethod(AlertDetails, {
- provide: {
- alertId: 'alertId',
- projectPath,
- projectIssuesPath,
- projectId,
- },
- data() {
- return { alert: { ...mockAlert }, sidebarStatus: false, ...data };
- },
- mocks: {
- $apollo: {
- mutate: jest.fn(),
- queries: {
- alert: {
- loading,
+ wrapper = extendedWrapper(
+ mountMethod(AlertDetails, {
+ provide: {
+ alertId: 'alertId',
+ projectPath,
+ projectIssuesPath,
+ projectId,
+ },
+ data() {
+ return { alert: { ...mockAlert }, sidebarStatus: false, ...data };
+ },
+ mocks: {
+ $apollo: {
+ mutate: jest.fn(),
+ queries: {
+ alert: {
+ loading,
+ },
+ sidebarStatus: {},
},
- sidebarStatus: {},
},
+ $router,
+ $route: { params: {} },
},
- $router,
- $route: { params: {} },
- },
- stubs,
- });
+ stubs: {
+ ...stubs,
+ AlertSummaryRow,
+ },
+ }),
+ );
}
beforeEach(() => {
@@ -62,9 +69,10 @@ describe('AlertDetails', () => {
mock.restore();
});
- const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
- const findViewIncidentBtn = () => wrapper.find('[data-testid="viewIncidentBtn"]');
- const findIncidentCreationAlert = () => wrapper.find('[data-testid="incidentCreationError"]');
+ const findCreateIncidentBtn = () => wrapper.findByTestId('createIncidentBtn');
+ const findViewIncidentBtn = () => wrapper.findByTestId('viewIncidentBtn');
+ const findIncidentCreationAlert = () => wrapper.findByTestId('incidentCreationError');
+ const findEnvironmentLink = () => wrapper.findByTestId('environmentUrl');
const findDetailsTable = () => wrapper.find(AlertDetailsTable);
describe('Alert details', () => {
@@ -74,7 +82,7 @@ describe('AlertDetails', () => {
});
it('shows an empty state', () => {
- expect(wrapper.find('[data-testid="alertDetailsTabs"]').exists()).toBe(false);
+ expect(wrapper.findByTestId('alertDetailsTabs').exists()).toBe(false);
});
});
@@ -84,28 +92,26 @@ describe('AlertDetails', () => {
});
it('renders a tab with overview information', () => {
- expect(wrapper.find('[data-testid="overview"]').exists()).toBe(true);
+ expect(wrapper.findByTestId('overview').exists()).toBe(true);
});
it('renders a tab with an activity feed', () => {
- expect(wrapper.find('[data-testid="activity"]').exists()).toBe(true);
+ expect(wrapper.findByTestId('activity').exists()).toBe(true);
});
it('renders severity', () => {
- expect(wrapper.find('[data-testid="severity"]').text()).toBe(
+ expect(wrapper.findByTestId('severity').text()).toBe(
ALERTS_SEVERITY_LABELS[mockAlert.severity],
);
});
it('renders a title', () => {
- expect(wrapper.find('[data-testid="title"]').text()).toBe(mockAlert.title);
+ expect(wrapper.findByTestId('title').text()).toBe(mockAlert.title);
});
it('renders a start time', () => {
- expect(wrapper.find('[data-testid="startTimeItem"]').exists()).toBe(true);
- expect(wrapper.find('[data-testid="startTimeItem"]').props().time).toBe(
- mockAlert.startedAt,
- );
+ expect(wrapper.findByTestId('startTimeItem').exists()).toBe(true);
+ expect(wrapper.findByTestId('startTimeItem').props('time')).toBe(mockAlert.startedAt);
});
});
@@ -114,6 +120,8 @@ describe('AlertDetails', () => {
field | data | isShown
${'eventCount'} | ${1} | ${true}
${'eventCount'} | ${undefined} | ${false}
+ ${'environment'} | ${undefined} | ${false}
+ ${'environment'} | ${'Production'} | ${true}
${'monitoringTool'} | ${'New Relic'} | ${true}
${'monitoringTool'} | ${undefined} | ${false}
${'service'} | ${'Prometheus'} | ${true}
@@ -126,15 +134,29 @@ describe('AlertDetails', () => {
});
it(`${field} is ${isShown ? 'displayed' : 'hidden'} correctly`, () => {
+ const element = wrapper.findByTestId(field);
if (isShown) {
- expect(wrapper.find(`[data-testid="${field}"]`).text()).toBe(data.toString());
+ expect(element.text()).toContain(data.toString());
} else {
- expect(wrapper.find(`[data-testid="${field}"]`).exists()).toBe(false);
+ expect(wrapper.findByTestId(field).exists()).toBe(false);
}
});
});
});
+ describe('environment URL fields', () => {
+ it('should show the environment URL when available', () => {
+ const environment = 'Production';
+ const environmentUrl = 'fake/url';
+ mountComponent({
+ data: { alert: { ...mockAlert, environment, environmentUrl } },
+ });
+
+ expect(findEnvironmentLink().text()).toBe(environment);
+ expect(findEnvironmentLink().attributes('href')).toBe(environmentUrl);
+ });
+ });
+
describe('Create incident from alert', () => {
it('should display "View incident" button that links the incident page when incident exists', () => {
const issueIid = '3';
@@ -222,7 +244,7 @@ describe('AlertDetails', () => {
mountComponent({
data: { errored: true, sidebarErrorMessage: '<span data-testid="htmlError" />' },
});
- expect(wrapper.find('[data-testid="htmlError"]').exists()).toBe(true);
+ expect(wrapper.findByTestId('htmlError').exists()).toBe(true);
});
it('does not display an error when dismissed', () => {
@@ -232,7 +254,7 @@ describe('AlertDetails', () => {
});
describe('header', () => {
- const findHeader = () => wrapper.find('[data-testid="alert-header"]');
+ const findHeader = () => wrapper.findByTestId('alert-header');
const stubs = { TimeAgoTooltip: { template: '<span>now</span>' } };
describe('individual header fields', () => {
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index bcad415eb19..3aa67614369 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -3,8 +3,8 @@ import {
GlTable,
GlAlert,
GlLoadingIcon,
- GlDeprecatedDropdown,
- GlDeprecatedDropdownItem,
+ GlDropdown,
+ GlDropdownItem,
GlIcon,
GlTabs,
GlTab,
@@ -34,12 +34,12 @@ describe('AlertManagementTable', () => {
const findAlerts = () => wrapper.findAll('table tbody tr');
const findAlert = () => wrapper.find(GlAlert);
const findLoader = () => wrapper.find(GlLoadingIcon);
- const findStatusDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findStatusDropdown = () => wrapper.find(GlDropdown);
const findStatusFilterTabs = () => wrapper.findAll(GlTab);
const findStatusTabs = () => wrapper.find(GlTabs);
const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
const findDateFields = () => wrapper.findAll(TimeAgo);
- const findFirstStatusOption = () => findStatusDropdown().find(GlDeprecatedDropdownItem);
+ const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem);
const findPagination = () => wrapper.find(GlPagination);
const findSearch = () => wrapper.find(GlSearchBoxByType);
const findSeverityColumnHeader = () =>
@@ -295,10 +295,30 @@ describe('AlertManagementTable', () => {
loading: false,
});
+ expect(visitUrl).not.toHaveBeenCalled();
+
findAlerts()
.at(0)
.trigger('click');
- expect(visitUrl).toHaveBeenCalledWith('/1527542/details');
+ expect(visitUrl).toHaveBeenCalledWith('/1527542/details', false);
+ });
+
+ it('navigates to the detail page in new tab when alert row is clicked with the metaKey', () => {
+ mountComponent({
+ props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
+ loading: false,
+ });
+
+ expect(visitUrl).not.toHaveBeenCalled();
+
+ findAlerts()
+ .at(0)
+ .trigger('click', {
+ metaKey: true,
+ });
+
+ expect(visitUrl).toHaveBeenCalledWith('/1527542/details', true);
});
describe('alert issue links', () => {
diff --git a/spec/frontend/alert_management/components/alert_summary_row_spec.js b/spec/frontend/alert_management/components/alert_summary_row_spec.js
new file mode 100644
index 00000000000..47c715c089a
--- /dev/null
+++ b/spec/frontend/alert_management/components/alert_summary_row_spec.js
@@ -0,0 +1,40 @@
+import { shallowMount } from '@vue/test-utils';
+import AlertSummaryRow from '~/alert_management/components/alert_summary_row.vue';
+
+const label = 'a label';
+const value = 'a value';
+
+describe('AlertSummaryRow', () => {
+ let wrapper;
+
+ function mountComponent({ mountMethod = shallowMount, props, defaultSlot } = {}) {
+ wrapper = mountMethod(AlertSummaryRow, {
+ propsData: props,
+ scopedSlots: {
+ default: defaultSlot,
+ },
+ });
+ }
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('Alert Summary Row', () => {
+ beforeEach(() => {
+ mountComponent({
+ props: {
+ label,
+ },
+ defaultSlot: `<span class="value">${value}</span>`,
+ });
+ });
+
+ it('should display a label and a value', () => {
+ expect(wrapper.text()).toBe(`${label} ${value}`);
+ });
+ });
+});
diff --git a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
index 4c9db02eff4..1d87301aac9 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import { GlDeprecatedDropdownItem } from '@gitlab/ui';
+import { GlDropdownItem } from '@gitlab/ui';
import SidebarAssignee from '~/alert_management/components/sidebar/sidebar_assignee.vue';
import SidebarAssignees from '~/alert_management/components/sidebar/sidebar_assignees.vue';
import AlertSetAssignees from '~/alert_management/graphql/mutations/alert_set_assignees.mutation.graphql';
@@ -106,7 +106,7 @@ describe('Alert Details Sidebar Assignees', () => {
it('renders an unassigned option', async () => {
wrapper.setData({ isDropdownSearching: false });
await wrapper.vm.$nextTick();
- expect(wrapper.find(GlDeprecatedDropdownItem).text()).toBe('Unassigned');
+ expect(wrapper.find(GlDropdownItem).text()).toBe('Unassigned');
});
it('calls `$apollo.mutate` with `AlertSetAssignees` mutation and variables containing `iid`, `assigneeUsernames`, & `projectPath`', async () => {
diff --git a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
index a8fe40687e1..e144d473c12 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { GlDeprecatedDropdown, GlDeprecatedDropdownItem, GlLoadingIcon } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
import AlertSidebarStatus from '~/alert_management/components/sidebar/sidebar_status.vue';
import updateAlertStatus from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
@@ -10,9 +10,10 @@ const mockAlert = mockAlerts[0];
describe('Alert Details Sidebar Status', () => {
let wrapper;
- const findStatusDropdown = () => wrapper.find(GlDeprecatedDropdown);
- const findStatusDropdownItem = () => wrapper.find(GlDeprecatedDropdownItem);
+ const findStatusDropdown = () => wrapper.find(GlDropdown);
+ const findStatusDropdownItem = () => wrapper.find(GlDropdownItem);
const findStatusLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findStatusDropdownHeader = () => wrapper.find('[data-testid="dropdown-header"]');
function mountComponent({ data, sidebarCollapsed = true, loading = false, stubs = {} } = {}) {
wrapper = mount(AlertSidebarStatus, {
@@ -56,11 +57,7 @@ describe('Alert Details Sidebar Status', () => {
});
it('displays the dropdown status header', () => {
- expect(
- findStatusDropdown()
- .find('.dropdown-title')
- .exists(),
- ).toBe(true);
+ expect(findStatusDropdownHeader().exists()).toBe(true);
});
describe('updating the alert status', () => {
diff --git a/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js b/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
index 8dd663e55d9..65cfc600d76 100644
--- a/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
+++ b/spec/frontend/alert_management/components/system_notes/alert_management_system_note_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
import SystemNote from '~/alert_management/components/system_notes/system_note.vue';
import mockAlerts from '../../mocks/alerts.json';
@@ -19,6 +20,7 @@ describe('Alert Details System Note', () => {
afterEach(() => {
if (wrapper) {
wrapper.destroy();
+ wrapper = null;
}
});
@@ -29,10 +31,10 @@ describe('Alert Details System Note', () => {
it('renders the correct system note', () => {
const noteId = wrapper.find('.note-wrapper').attributes('id');
- const iconRoute = wrapper.find('use').attributes('href');
+ const iconName = wrapper.find(GlIcon).attributes('name');
expect(noteId).toBe('note_1628');
- expect(iconRoute.includes('user')).toBe(true);
+ expect(iconName).toBe(mockAlert.notes.nodes[0].systemNoteIconName);
});
});
});
diff --git a/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap b/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
index 16e92bf505a..545be94dcaa 100644
--- a/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
+++ b/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
@@ -26,7 +26,7 @@ exports[`AlertsSettingsForm with default values renders the initial template 1`]
</gl-form-group-stub>
<gl-form-group-stub label=\\"Authorization key\\" label-for=\\"authorization-key\\" label-class=\\"label-bold\\">
<gl-form-input-group-stub value=\\"abcedfg123\\" predefinedoptions=\\"[object Object]\\" id=\\"authorization-key\\" readonly=\\"\\" class=\\"gl-mb-2\\"></gl-form-input-group-stub>
- <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\" class=\\"gl-mt-3\\" role=\\"button\\" tabindex=\\"0\\">Reset key</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\" class=\\"gl-mt-3\\" role=\\"button\\" tabindex=\\"0\\">Reset key</gl-button-stub>
<gl-modal-stub modalid=\\"authKeyModal\\" titletag=\\"h4\\" modalclass=\\"\\" size=\\"md\\" title=\\"Reset key\\" ok-title=\\"Reset key\\" ok-variant=\\"danger\\">
Resetting the authorization key for this project will require updating the authorization key in every alert source it is enabled in.
</gl-modal-stub>
@@ -34,16 +34,14 @@ exports[`AlertsSettingsForm with default values renders the initial template 1`]
<gl-form-group-stub label=\\"Alert test payload\\" label-for=\\"alert-json\\" label-class=\\"label-bold\\">
<gl-form-textarea-stub noresize=\\"true\\" id=\\"alert-json\\" disabled=\\"true\\" state=\\"true\\" placeholder=\\"Enter test alert JSON....\\" rows=\\"6\\" max-rows=\\"10\\"></gl-form-textarea-stub>
</gl-form-group-stub>
- <div class=\\"gl-display-flex gl-justify-content-end\\">
- <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
- </div>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
<div class=\\"footer-block row-content-block gl-display-flex gl-justify-content-space-between\\">
- <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
- Cancel
- </gl-button-stub>
- <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
+ <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\">
Save changes
</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" disabled=\\"true\\">
+ Cancel
+ </gl-button-stub>
</div>
</gl-form-stub>
</div>"
diff --git a/spec/frontend/analytics/instance_statistics/components/app_spec.js b/spec/frontend/analytics/instance_statistics/components/app_spec.js
new file mode 100644
index 00000000000..242621dc40c
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/app_spec.js
@@ -0,0 +1,24 @@
+import { shallowMount } from '@vue/test-utils';
+import InstanceStatisticsApp from '~/analytics/instance_statistics/components/app.vue';
+import InstanceCounts from '~/analytics/instance_statistics/components/instance_counts.vue';
+
+describe('InstanceStatisticsApp', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(InstanceStatisticsApp);
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('displays the instance counts component', () => {
+ expect(wrapper.find(InstanceCounts).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js b/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js
new file mode 100644
index 00000000000..2274f4c3fde
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/components/instance_counts_spec.js
@@ -0,0 +1,54 @@
+import { shallowMount } from '@vue/test-utils';
+import InstanceCounts from '~/analytics/instance_statistics/components/instance_counts.vue';
+import MetricCard from '~/analytics/shared/components/metric_card.vue';
+import countsMockData from '../mock_data';
+
+describe('InstanceCounts', () => {
+ let wrapper;
+
+ const createComponent = ({ loading = false, data = {} } = {}) => {
+ const $apollo = {
+ queries: {
+ counts: {
+ loading,
+ },
+ },
+ };
+
+ wrapper = shallowMount(InstanceCounts, {
+ mocks: { $apollo },
+ data() {
+ return {
+ ...data,
+ };
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findMetricCard = () => wrapper.find(MetricCard);
+
+ describe('while loading', () => {
+ beforeEach(() => {
+ createComponent({ loading: true });
+ });
+
+ it('displays the metric card with isLoading=true', () => {
+ expect(findMetricCard().props('isLoading')).toBe(true);
+ });
+ });
+
+ describe('with data', () => {
+ beforeEach(() => {
+ createComponent({ data: { counts: countsMockData } });
+ });
+
+ it('passes the counts data to the metric card', () => {
+ expect(findMetricCard().props('metrics')).toEqual(countsMockData);
+ });
+ });
+});
diff --git a/spec/frontend/analytics/instance_statistics/mock_data.js b/spec/frontend/analytics/instance_statistics/mock_data.js
new file mode 100644
index 00000000000..9fabf3a4c65
--- /dev/null
+++ b/spec/frontend/analytics/instance_statistics/mock_data.js
@@ -0,0 +1,4 @@
+export default [
+ { key: 'projects', value: 10, label: 'Projects' },
+ { key: 'groups', value: 20, label: 'Group' },
+];
diff --git a/spec/frontend/analytics/shared/components/metric_card_spec.js b/spec/frontend/analytics/shared/components/metric_card_spec.js
new file mode 100644
index 00000000000..e89d499ed9b
--- /dev/null
+++ b/spec/frontend/analytics/shared/components/metric_card_spec.js
@@ -0,0 +1,129 @@
+import { mount } from '@vue/test-utils';
+import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import MetricCard from '~/analytics/shared/components/metric_card.vue';
+
+const metrics = [
+ { key: 'first_metric', value: 10, label: 'First metric', unit: 'days', link: 'some_link' },
+ { key: 'second_metric', value: 20, label: 'Yet another metric' },
+ { key: 'third_metric', value: null, label: 'Null metric without value', unit: 'parsecs' },
+ { key: 'fourth_metric', value: '-', label: 'Metric without value', unit: 'parsecs' },
+];
+
+const defaultProps = {
+ title: 'My fancy title',
+ isLoading: false,
+ metrics,
+};
+
+describe('MetricCard', () => {
+ let wrapper;
+
+ const factory = (props = defaultProps) => {
+ wrapper = mount(MetricCard, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findTitle = () => wrapper.find({ ref: 'title' });
+ const findLoadingIndicator = () => wrapper.find(GlSkeletonLoading);
+ const findMetricsWrapper = () => wrapper.find({ ref: 'metricsWrapper' });
+ const findMetricItem = () => wrapper.findAll({ ref: 'metricItem' });
+ const findTooltip = () => wrapper.find('[data-testid="tooltip"]');
+
+ describe('template', () => {
+ it('renders the title', () => {
+ factory();
+
+ expect(findTitle().text()).toContain('My fancy title');
+ });
+
+ describe('when isLoading is true', () => {
+ beforeEach(() => {
+ factory({ isLoading: true });
+ });
+
+ it('displays a loading indicator', () => {
+ expect(findLoadingIndicator().exists()).toBe(true);
+ });
+
+ it('does not display the metrics container', () => {
+ expect(findMetricsWrapper().exists()).toBe(false);
+ });
+ });
+
+ describe('when isLoading is false', () => {
+ beforeEach(() => {
+ factory({ isLoading: false });
+ });
+
+ it('does not display a loading indicator', () => {
+ expect(findLoadingIndicator().exists()).toBe(false);
+ });
+
+ it('displays the metrics container', () => {
+ expect(findMetricsWrapper().exists()).toBe(true);
+ });
+
+ it('renders a metric item for each metric', () => {
+ expect(findMetricItem()).toHaveLength(metrics.length);
+ });
+
+ describe('with tooltip text', () => {
+ const tooltipText = 'This is a tooltip';
+ const tooltipMetric = {
+ key: 'fifth_metric',
+ value: '-',
+ label: 'Metric with tooltip',
+ unit: 'parsecs',
+ tooltipText,
+ };
+
+ beforeEach(() => {
+ factory({
+ isLoading: false,
+ metrics: [tooltipMetric],
+ });
+ });
+
+ it('will render a tooltip', () => {
+ const tt = getBinding(findTooltip().element, 'gl-tooltip');
+ expect(tt.value.title).toEqual(tooltipText);
+ });
+ });
+
+ describe.each`
+ columnIndex | label | value | unit | link
+ ${0} | ${'First metric'} | ${10} | ${' days'} | ${'some_link'}
+ ${1} | ${'Yet another metric'} | ${20} | ${''} | ${null}
+ ${2} | ${'Null metric without value'} | ${'-'} | ${''} | ${null}
+ ${3} | ${'Metric without value'} | ${'-'} | ${''} | ${null}
+ `('metric columns', ({ columnIndex, label, value, unit, link }) => {
+ it(`renders ${value}${unit} ${label} with URL ${link}`, () => {
+ const allMetricItems = findMetricItem();
+ const metricItem = allMetricItems.at(columnIndex);
+ const text = metricItem.text();
+
+ expect(text).toContain(`${value}${unit}`);
+ expect(text).toContain(label);
+
+ if (link) {
+ expect(metricItem.find('a').attributes('href')).toBe(link);
+ } else {
+ expect(metricItem.find('a').exists()).toBe(false);
+ }
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 3ae0d06162d..f7c6290ce1c 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1152,4 +1152,44 @@ describe('Api', () => {
});
});
});
+
+ describe('trackRedisHllUserEvent', () => {
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/usage_data/increment_unique_users`;
+
+ const event = 'dummy_event';
+ const postData = { event };
+ const headers = {
+ 'Content-Type': 'application/json',
+ };
+
+ describe('when usage data increment unique users is called with feature flag disabled', () => {
+ beforeEach(() => {
+ gon.features = { ...gon.features, usageDataApi: false };
+ });
+
+ it('returns null', () => {
+ jest.spyOn(axios, 'post');
+ mock.onPost(expectedUrl).replyOnce(httpStatus.OK, true);
+
+ expect(axios.post).toHaveBeenCalledTimes(0);
+ expect(Api.trackRedisHllUserEvent(event)).toEqual(null);
+ });
+ });
+
+ describe('when usage data increment unique users is called', () => {
+ beforeEach(() => {
+ gon.features = { ...gon.features, usageDataApi: true };
+ });
+
+ it('resolves the Promise', () => {
+ jest.spyOn(axios, 'post');
+ mock.onPost(expectedUrl, { event }).replyOnce(httpStatus.OK, true);
+
+ return Api.trackRedisHllUserEvent(event).then(({ data }) => {
+ expect(data).toEqual(true);
+ expect(axios.post).toHaveBeenCalledWith(expectedUrl, postData, { headers });
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js
index f0ed18248f0..7fd6a9e7b87 100644
--- a/spec/frontend/awards_handler_spec.js
+++ b/spec/frontend/awards_handler_spec.js
@@ -309,6 +309,30 @@ describe('AwardsHandler', () => {
expect($('[data-name=alien]').is(':visible')).toBe(true);
expect($('.js-emoji-menu-search').val()).toBe('');
});
+
+ it('should fuzzy filter the emoji', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('sgls');
+
+ expect($('[data-name=angel]').is(':visible')).toBe(false);
+ expect($('[data-name=anger]').is(':visible')).toBe(false);
+ expect($('[data-name=sunglasses]').is(':visible')).toBe(true);
+ });
+
+ it('should filter by emoji description', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('baby');
+ expect($('[data-name=angel]').is(':visible')).toBe(true);
+ });
+
+ it('should filter by emoji unicode value', async () => {
+ await openAndWaitForEmojiMenu();
+
+ awardsHandler.searchEmojis('👼');
+ expect($('[data-name=angel]').is(':visible')).toBe(true);
+ });
});
describe('emoji menu', () => {
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index 2b63ece28ba..8ddad3dacfe 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -43,22 +43,6 @@ describe('Batch comments draft preview item component', () => {
);
});
- it('adds is last class', () => {
- createComponent(true);
-
- expect(vm.$el.classList).toContain('is-last');
- });
-
- it('scrolls to draft on click', () => {
- createComponent();
-
- jest.spyOn(vm.$store, 'dispatch').mockImplementation();
-
- vm.$el.click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith('batchComments/scrollToDraft', vm.draft);
- });
-
describe('for file', () => {
it('renders file path', () => {
createComponent(false, { file_path: 'index.js', file_hash: 'abc', position: {} });
diff --git a/spec/frontend/batch_comments/components/publish_button_spec.js b/spec/frontend/batch_comments/components/publish_button_spec.js
index 4362f62c7f8..4032713150c 100644
--- a/spec/frontend/batch_comments/components/publish_button_spec.js
+++ b/spec/frontend/batch_comments/components/publish_button_spec.js
@@ -29,17 +29,6 @@ describe('Batch comments publish button component', () => {
expect(vm.$store.dispatch).toHaveBeenCalledWith('batchComments/publishReview', undefined);
});
- it('dispatches toggleReviewDropdown when shouldPublish is false on click', () => {
- vm.shouldPublish = false;
-
- vm.$el.click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- 'batchComments/toggleReviewDropdown',
- undefined,
- );
- });
-
it('sets loading when isPublishing is true', done => {
vm.$store.state.batchComments.isPublishing = true;
diff --git a/spec/frontend/batch_comments/components/publish_dropdown_spec.js b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
index fb3c532174d..f235867f002 100644
--- a/spec/frontend/batch_comments/components/publish_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
@@ -1,96 +1,39 @@
-import Vue from 'vue';
-import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
import { createStore } from '~/mr_notes/stores';
import '~/behaviors/markdown/render_gfm';
import { createDraft } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('Batch comments publish dropdown component', () => {
- let vm;
- let Component;
+ let wrapper;
- function createComponent(extendStore = () => {}) {
+ function createComponent() {
const store = createStore();
store.state.batchComments.drafts.push(createDraft(), { ...createDraft(), id: 2 });
- extendStore(store);
-
- vm = mountComponentWithStore(Component, { store });
+ wrapper = shallowMount(PreviewDropdown, {
+ store,
+ });
}
- beforeAll(() => {
- Component = Vue.extend(PreviewDropdown);
- });
-
afterEach(() => {
- vm.$destroy();
- });
-
- it('toggles dropdown when clicking button', done => {
- createComponent();
-
- jest.spyOn(vm.$store, 'dispatch');
-
- vm.$el.querySelector('.review-preview-dropdown-toggle').click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- 'batchComments/toggleReviewDropdown',
- expect.anything(),
- );
-
- setImmediate(() => {
- expect(vm.$el.classList).toContain('show');
-
- done();
- });
- });
-
- it('toggles dropdown when clicking body', () => {
- createComponent();
-
- vm.$store.state.batchComments.showPreviewDropdown = true;
-
- jest.spyOn(vm.$store, 'dispatch').mockImplementation();
-
- document.body.click();
-
- expect(vm.$store.dispatch).toHaveBeenCalledWith(
- 'batchComments/toggleReviewDropdown',
- undefined,
- );
+ wrapper.destroy();
});
it('renders list of drafts', () => {
- createComponent(store => {
- Object.assign(store.state.notes, {
- isNotesFetched: true,
- });
- });
-
- expect(vm.$el.querySelectorAll('.dropdown-content li').length).toBe(2);
- });
-
- it('adds is-last class to last item', () => {
- createComponent(store => {
- Object.assign(store.state.notes, {
- isNotesFetched: true,
- });
- });
-
- expect(vm.$el.querySelectorAll('.dropdown-content li')[1].querySelector('.is-last')).not.toBe(
- null,
- );
- });
-
- it('renders draft count in dropdown title', () => {
createComponent();
- expect(vm.$el.querySelector('.dropdown-title').textContent).toContain('2 pending comments');
+ expect(wrapper.findAll(GlDropdownItem).length).toBe(2);
});
- it('renders publish button in footer', () => {
+ it('renders draft count in dropdown title', () => {
createComponent();
- expect(vm.$el.querySelector('.dropdown-footer .js-publish-draft-button')).not.toBe(null);
+ expect(wrapper.find(GlDropdown).props('headerText')).toEqual('2 pending comments');
});
});
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index a6942115649..e66f36aa3a2 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -199,42 +199,6 @@ describe('Batch comments store actions', () => {
});
});
- describe('discardReview', () => {
- it('commits mutations', done => {
- const getters = {
- getNotesData: { draftsDiscardPath: TEST_HOST },
- };
- const commit = jest.fn();
- mock.onAny().reply(200);
-
- actions
- .discardReview({ getters, commit })
- .then(() => {
- expect(commit.mock.calls[0]).toEqual(['REQUEST_DISCARD_REVIEW']);
- expect(commit.mock.calls[1]).toEqual(['RECEIVE_DISCARD_REVIEW_SUCCESS']);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('commits error mutations', done => {
- const getters = {
- getNotesData: { draftsDiscardPath: TEST_HOST },
- };
- const commit = jest.fn();
- mock.onAny().reply(500);
-
- actions
- .discardReview({ getters, commit })
- .then(() => {
- expect(commit.mock.calls[0]).toEqual(['REQUEST_DISCARD_REVIEW']);
- expect(commit.mock.calls[1]).toEqual(['RECEIVE_DISCARD_REVIEW_ERROR']);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
describe('updateDraft', () => {
let getters;
@@ -284,56 +248,6 @@ describe('Batch comments store actions', () => {
});
});
- describe('toggleReviewDropdown', () => {
- it('dispatches openReviewDropdown', done => {
- testAction(
- actions.toggleReviewDropdown,
- null,
- { showPreviewDropdown: false },
- [],
- [{ type: 'openReviewDropdown' }],
- done,
- );
- });
-
- it('dispatches closeReviewDropdown when showPreviewDropdown is true', done => {
- testAction(
- actions.toggleReviewDropdown,
- null,
- { showPreviewDropdown: true },
- [],
- [{ type: 'closeReviewDropdown' }],
- done,
- );
- });
- });
-
- describe('openReviewDropdown', () => {
- it('commits OPEN_REVIEW_DROPDOWN', done => {
- testAction(
- actions.openReviewDropdown,
- null,
- null,
- [{ type: 'OPEN_REVIEW_DROPDOWN' }],
- [],
- done,
- );
- });
- });
-
- describe('closeReviewDropdown', () => {
- it('commits CLOSE_REVIEW_DROPDOWN', done => {
- testAction(
- actions.closeReviewDropdown,
- null,
- null,
- [{ type: 'CLOSE_REVIEW_DROPDOWN' }],
- [],
- done,
- );
- });
- });
-
describe('expandAllDiscussions', () => {
it('dispatches expandDiscussion for all drafts', done => {
const state = {
@@ -383,9 +297,7 @@ describe('Batch comments store actions', () => {
actions.scrollToDraft({ dispatch, rootGetters }, draft);
- expect(dispatch.mock.calls[0]).toEqual(['closeReviewDropdown']);
-
- expect(dispatch.mock.calls[1]).toEqual([
+ expect(dispatch.mock.calls[0]).toEqual([
'expandDiscussion',
{ discussionId: '1' },
{ root: true },
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js
index a86726269ef..1406f66fd10 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/mutations_spec.js
@@ -89,42 +89,6 @@ describe('Batch comments mutations', () => {
});
});
- describe(types.REQUEST_DISCARD_REVIEW, () => {
- it('sets isDiscarding to true', () => {
- mutations[types.REQUEST_DISCARD_REVIEW](state);
-
- expect(state.isDiscarding).toBe(true);
- });
- });
-
- describe(types.RECEIVE_DISCARD_REVIEW_SUCCESS, () => {
- it('emptys drafts array', () => {
- state.drafts.push('test');
-
- mutations[types.RECEIVE_DISCARD_REVIEW_SUCCESS](state);
-
- expect(state.drafts).toEqual([]);
- });
-
- it('sets isDiscarding to false', () => {
- state.isDiscarding = true;
-
- mutations[types.RECEIVE_DISCARD_REVIEW_SUCCESS](state);
-
- expect(state.isDiscarding).toBe(false);
- });
- });
-
- describe(types.RECEIVE_DISCARD_REVIEW_ERROR, () => {
- it('updates isDiscarding to false', () => {
- state.isDiscarding = true;
-
- mutations[types.RECEIVE_DISCARD_REVIEW_ERROR](state);
-
- expect(state.isDiscarding).toBe(false);
- });
- });
-
describe(types.RECEIVE_DRAFT_UPDATE_SUCCESS, () => {
it('updates draft in store', () => {
state.drafts.push({ id: 1 });
@@ -140,20 +104,4 @@ describe('Batch comments mutations', () => {
]);
});
});
-
- describe(types.OPEN_REVIEW_DROPDOWN, () => {
- it('sets showPreviewDropdown to true', () => {
- mutations[types.OPEN_REVIEW_DROPDOWN](state);
-
- expect(state.showPreviewDropdown).toBe(true);
- });
- });
-
- describe(types.CLOSE_REVIEW_DROPDOWN, () => {
- it('sets showPreviewDropdown to false', () => {
- mutations[types.CLOSE_REVIEW_DROPDOWN](state);
-
- expect(state.showPreviewDropdown).toBe(false);
- });
- });
});
diff --git a/spec/frontend/behaviors/load_startup_css_spec.js b/spec/frontend/behaviors/load_startup_css_spec.js
new file mode 100644
index 00000000000..81222ac5aaa
--- /dev/null
+++ b/spec/frontend/behaviors/load_startup_css_spec.js
@@ -0,0 +1,44 @@
+import { setHTMLFixture } from 'helpers/fixtures';
+import { loadStartupCSS } from '~/behaviors/load_startup_css';
+
+describe('behaviors/load_startup_css', () => {
+ let loadListener;
+
+ const setupListeners = () => {
+ document
+ .querySelectorAll('link')
+ .forEach(x => x.addEventListener('load', () => loadListener(x)));
+ };
+
+ beforeEach(() => {
+ loadListener = jest.fn();
+
+ setHTMLFixture(`
+ <meta charset="utf-8" />
+ <link media="print" src="./lorem-print.css" />
+ <link media="print" src="./ipsum-print.css" />
+ <link media="all" src="./dolar-all.css" />
+ `);
+
+ setupListeners();
+
+ loadStartupCSS();
+ });
+
+ it('does nothing at first', () => {
+ expect(loadListener).not.toHaveBeenCalled();
+ });
+
+ describe('on window load', () => {
+ beforeEach(() => {
+ window.dispatchEvent(new Event('load'));
+ });
+
+ it('dispatches load to the print links', () => {
+ expect(loadListener.mock.calls.map(([el]) => el.getAttribute('src'))).toEqual([
+ './lorem-print.css',
+ './ipsum-print.css',
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
index 0f5b3cd3f5e..53815820bbe 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_filepath_spec.js.snap
@@ -27,8 +27,10 @@ exports[`Blob Header Filepath rendering matches the snapshot 1`] = `
</small>
<clipboard-button-stub
+ category="tertiary"
cssclass="btn-clipboard btn-transparent lh-100 position-static"
gfm="\`foo/bar/dummy.md\`"
+ size="medium"
text="foo/bar/dummy.md"
title="Copy file path"
tooltipplacement="top"
diff --git a/spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js b/spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js
deleted file mode 100644
index 8dc71f99010..00000000000
--- a/spec/frontend/blob/suggest_web_ide_ci/web_ide_alert_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import waitForPromises from 'helpers/wait_for_promises';
-import { shallowMount } from '@vue/test-utils';
-import { GlButton, GlAlert } from '@gitlab/ui';
-import axios from '~/lib/utils/axios_utils';
-import WebIdeAlert from '~/blob/suggest_web_ide_ci/components/web_ide_alert.vue';
-
-const dismissEndpoint = '/-/user_callouts';
-const featureId = 'web_ide_alert_dismissed';
-const editPath = 'edit/master/-/.gitlab-ci.yml';
-
-describe('WebIdeAlert', () => {
- let wrapper;
- let mock;
-
- const findButton = () => wrapper.find(GlButton);
- const findAlert = () => wrapper.find(GlAlert);
- const dismissAlert = alertWrapper => alertWrapper.vm.$emit('dismiss');
- const getPostPayload = () => JSON.parse(mock.history.post[0].data);
-
- const createComponent = () => {
- wrapper = shallowMount(WebIdeAlert, {
- propsData: {
- dismissEndpoint,
- featureId,
- editPath,
- },
- });
- };
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
-
- mock.onPost(dismissEndpoint).reply(200);
-
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
-
- mock.restore();
- });
-
- describe('with defaults', () => {
- it('displays alert correctly', () => {
- expect(findAlert().exists()).toBe(true);
- });
-
- it('web ide button link has correct path', () => {
- expect(findButton().attributes('href')).toBe(editPath);
- });
-
- it('dismisses alert correctly', async () => {
- const alertWrapper = findAlert();
-
- dismissAlert(alertWrapper);
-
- await waitForPromises();
-
- expect(alertWrapper.exists()).toBe(false);
- expect(mock.history.post).toHaveLength(1);
- expect(getPostPayload()).toEqual({ feature_name: featureId });
- });
- });
-});
diff --git a/spec/frontend/boards/board_blank_state_spec.js b/spec/frontend/boards/board_blank_state_spec.js
deleted file mode 100644
index 3ffdda52f58..00000000000
--- a/spec/frontend/boards/board_blank_state_spec.js
+++ /dev/null
@@ -1,95 +0,0 @@
-import Vue from 'vue';
-import boardsStore from '~/boards/stores/boards_store';
-import BoardBlankState from '~/boards/components/board_blank_state.vue';
-
-describe('Boards blank state', () => {
- let vm;
- let fail = false;
-
- beforeEach(done => {
- const Comp = Vue.extend(BoardBlankState);
-
- boardsStore.create();
-
- jest.spyOn(boardsStore, 'addList').mockImplementation();
- jest.spyOn(boardsStore, 'removeList').mockImplementation();
- jest.spyOn(boardsStore, 'generateDefaultLists').mockImplementation(
- () =>
- new Promise((resolve, reject) => {
- if (fail) {
- reject();
- } else {
- resolve({
- data: [
- {
- id: 1,
- title: 'To Do',
- label: { id: 1 },
- },
- {
- id: 2,
- title: 'Doing',
- label: { id: 2 },
- },
- ],
- });
- }
- }),
- );
-
- vm = new Comp();
-
- setImmediate(() => {
- vm.$mount();
- done();
- });
- });
-
- it('renders pre-defined labels', () => {
- expect(vm.$el.querySelectorAll('.board-blank-state-list li').length).toBe(2);
-
- expect(vm.$el.querySelectorAll('.board-blank-state-list li')[0].textContent.trim()).toEqual(
- 'To Do',
- );
-
- expect(vm.$el.querySelectorAll('.board-blank-state-list li')[1].textContent.trim()).toEqual(
- 'Doing',
- );
- });
-
- it('clears blank state', done => {
- vm.$el.querySelector('.btn-default').click();
-
- setImmediate(() => {
- expect(boardsStore.welcomeIsHidden()).toBeTruthy();
-
- done();
- });
- });
-
- it('creates pre-defined labels', done => {
- vm.$el.querySelector('.btn-success').click();
-
- setImmediate(() => {
- expect(boardsStore.addList).toHaveBeenCalledTimes(2);
- expect(boardsStore.addList).toHaveBeenCalledWith(expect.objectContaining({ title: 'To Do' }));
-
- expect(boardsStore.addList).toHaveBeenCalledWith(expect.objectContaining({ title: 'Doing' }));
-
- done();
- });
- });
-
- it('resets the store if request fails', done => {
- fail = true;
-
- vm.$el.querySelector('.btn-success').click();
-
- setImmediate(() => {
- expect(boardsStore.welcomeIsHidden()).toBeFalsy();
- expect(boardsStore.removeList).toHaveBeenCalledWith(undefined, 'label');
-
- done();
- });
- });
-});
diff --git a/spec/frontend/boards/boards_store_spec.js b/spec/frontend/boards/boards_store_spec.js
index 41971137b95..e7c1cf79fdc 100644
--- a/spec/frontend/boards/boards_store_spec.js
+++ b/spec/frontend/boards/boards_store_spec.js
@@ -1,7 +1,7 @@
import AxiosMockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
-import boardsStore from '~/boards/stores/boards_store';
+import boardsStore, { gqlClient } from '~/boards/stores/boards_store';
import eventHub from '~/boards/eventhub';
import { listObj, listObjDuplicate } from './mock_data';
@@ -503,11 +503,15 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
axiosMock.onPut(url).replyOnce(config => requestSpy(config));
+ jest.spyOn(gqlClient, 'mutate').mockReturnValue(Promise.resolve({}));
});
it('makes a request to update the board', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
- const expectedResponse = expect.objectContaining({ data: dummyResponse });
+ const expectedResponse = [
+ expect.objectContaining({ data: dummyResponse }),
+ expect.objectContaining({}),
+ ];
return expect(
boardsStore.createBoard({
@@ -555,11 +559,12 @@ describe('boardsStore', () => {
beforeEach(() => {
requestSpy = jest.fn();
axiosMock.onPost(url).replyOnce(config => requestSpy(config));
+ jest.spyOn(gqlClient, 'mutate').mockReturnValue(Promise.resolve({}));
});
it('makes a request to create a new board', () => {
requestSpy.mockReturnValue([200, dummyResponse]);
- const expectedResponse = expect.objectContaining({ data: dummyResponse });
+ const expectedResponse = dummyResponse;
return expect(boardsStore.createBoard(board))
.resolves.toEqual(expectedResponse)
@@ -740,14 +745,6 @@ describe('boardsStore', () => {
expect(boardsStore.shouldAddBlankState()).toBe(true);
});
- it('adds the blank state', () => {
- boardsStore.addBlankState();
-
- const list = boardsStore.findList('type', 'blank', 'blank');
-
- expect(list).toBeDefined();
- });
-
it('removes list from state', () => {
boardsStore.addList(listObj);
diff --git a/spec/frontend/boards/components/board_configuration_options_spec.js b/spec/frontend/boards/components/board_configuration_options_spec.js
new file mode 100644
index 00000000000..e9a1cb6a4e8
--- /dev/null
+++ b/spec/frontend/boards/components/board_configuration_options_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import BoardConfigurationOptions from '~/boards/components/board_configuration_options.vue';
+
+describe('BoardConfigurationOptions', () => {
+ let wrapper;
+ const board = { hide_backlog_list: false, hide_closed_list: false };
+
+ const defaultProps = {
+ currentBoard: board,
+ board,
+ isNewForm: false,
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(BoardConfigurationOptions, {
+ propsData: { ...defaultProps },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const backlogListCheckbox = el => el.find('[data-testid="backlog-list-checkbox"]');
+ const closedListCheckbox = el => el.find('[data-testid="closed-list-checkbox"]');
+
+ const checkboxAssert = (backlogCheckbox, closedCheckbox) => {
+ expect(backlogListCheckbox(wrapper).attributes('checked')).toEqual(
+ backlogCheckbox ? undefined : 'true',
+ );
+ expect(closedListCheckbox(wrapper).attributes('checked')).toEqual(
+ closedCheckbox ? undefined : 'true',
+ );
+ };
+
+ it.each`
+ backlogCheckboxValue | closedCheckboxValue
+ ${true} | ${true}
+ ${true} | ${false}
+ ${false} | ${true}
+ ${false} | ${false}
+ `(
+ 'renders two checkboxes when one is $backlogCheckboxValue and the other is $closedCheckboxValue',
+ async ({ backlogCheckboxValue, closedCheckboxValue }) => {
+ await wrapper.setData({
+ hideBacklogList: backlogCheckboxValue,
+ hideClosedList: closedCheckboxValue,
+ });
+
+ return wrapper.vm.$nextTick().then(() => {
+ checkboxAssert(backlogCheckboxValue, closedCheckboxValue);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index df117d06cdf..09e38001e2e 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -23,9 +23,6 @@ describe('BoardContent', () => {
return new Vuex.Store({
getters,
state,
- actions: {
- fetchIssuesForAllLists: () => {},
- },
});
};
diff --git a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
index 1dbcbd06407..e7139ceaa93 100644
--- a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
@@ -96,12 +96,22 @@ describe('boards sidebar remove issue', () => {
expect(findExpanded().isVisible()).toBe(false);
});
- it('emits changed event', async () => {
+ it('emits close event', async () => {
document.body.click();
await wrapper.vm.$nextTick();
- expect(wrapper.emitted().changed[1][0]).toBe(false);
+ expect(wrapper.emitted().close.length).toBe(1);
});
});
+
+ it('emits open when the edit button is clicked and edit is initialized to false', async () => {
+ createComponent({ canUpdate: true });
+
+ findEditButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted().open.length).toBe(1);
+ });
});
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index bdbcd435708..6415a5a5d3a 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -6,12 +6,13 @@ import {
mockIssueWithModel,
mockIssue2WithModel,
rawIssue,
+ mockIssues,
} from '../mock_data';
import actions, { gqlClient } from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
import { inactiveId, ListType } from '~/boards/constants';
import issueMoveListMutation from '~/boards/queries/issue_move_list.mutation.graphql';
-import { fullBoardId } from '~/boards/boards_util';
+import { fullBoardId, formatListIssues } from '~/boards/boards_util';
const expectNotImplemented = action => {
it('is not implemented', () => {
@@ -237,6 +238,77 @@ describe('deleteList', () => {
expectNotImplemented(actions.deleteList);
});
+describe('fetchIssuesForList', () => {
+ const listId = mockLists[0].id;
+
+ const state = {
+ endpoints: {
+ fullPath: 'gitlab-org',
+ boardId: 1,
+ },
+ filterParams: {},
+ boardType: 'group',
+ };
+
+ const queryResponse = {
+ data: {
+ group: {
+ board: {
+ lists: {
+ nodes: [
+ {
+ id: listId,
+ issues: {
+ nodes: mockIssues,
+ },
+ },
+ ],
+ },
+ },
+ },
+ },
+ };
+
+ const formattedIssues = formatListIssues(queryResponse.data.group.board.lists);
+
+ it('should commit mutation RECEIVE_ISSUES_FOR_LIST_SUCCESS on success', done => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(queryResponse);
+
+ testAction(
+ actions.fetchIssuesForList,
+ listId,
+ state,
+ [
+ {
+ type: types.RECEIVE_ISSUES_FOR_LIST_SUCCESS,
+ payload: { listIssues: formattedIssues, listId },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('should commit mutation RECEIVE_ISSUES_FOR_LIST_FAILURE on failure', done => {
+ jest.spyOn(gqlClient, 'query').mockResolvedValue(Promise.reject());
+
+ testAction(
+ actions.fetchIssuesForList,
+ listId,
+ state,
+ [{ type: types.RECEIVE_ISSUES_FOR_LIST_FAILURE, payload: listId }],
+ [],
+ done,
+ );
+ });
+});
+
+describe('resetIssues', () => {
+ it('commits RESET_ISSUES mutation', () => {
+ return testAction(actions.resetIssues, {}, {}, [{ type: types.RESET_ISSUES }], []);
+ });
+});
+
describe('moveIssue', () => {
const listIssues = {
'gid://gitlab/List/1': [436, 437],
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index a13a99a507e..c80537bf168 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -145,6 +145,23 @@ describe('Board Store Mutations', () => {
expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_ERROR);
});
+ describe('RESET_ISSUES', () => {
+ it('should remove issues from issuesByListId state', () => {
+ const issuesByListId = {
+ 'gid://gitlab/List/1': [mockIssue.id],
+ };
+
+ state = {
+ ...state,
+ issuesByListId,
+ };
+
+ mutations[types.RESET_ISSUES](state);
+
+ expect(state.issuesByListId).toEqual({ 'gid://gitlab/List/1': [] });
+ });
+ });
+
describe('RECEIVE_ISSUES_FOR_LIST_SUCCESS', () => {
it('updates issuesByListId and issues on state', () => {
const listIssues = {
@@ -156,7 +173,6 @@ describe('Board Store Mutations', () => {
state = {
...state,
- isLoadingIssues: true,
issuesByListId: {},
issues: {},
boardLists: mockListsWithModel,
@@ -172,16 +188,6 @@ describe('Board Store Mutations', () => {
});
});
- describe('REQUEST_ISSUES_FOR_ALL_LISTS', () => {
- it('sets isLoadingIssues to true', () => {
- expect(state.isLoadingIssues).toBe(false);
-
- mutations.REQUEST_ISSUES_FOR_ALL_LISTS(state);
-
- expect(state.isLoadingIssues).toBe(true);
- });
- });
-
describe('RECEIVE_ISSUES_FOR_LIST_FAILURE', () => {
it('sets error message', () => {
state = {
@@ -200,51 +206,10 @@ describe('Board Store Mutations', () => {
});
});
- describe('RECEIVE_ISSUES_FOR_ALL_LISTS_SUCCESS', () => {
- it('sets isLoadingIssues to false and updates issuesByListId object', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id],
- };
- const issues = {
- '1': mockIssue,
- };
-
- state = {
- ...state,
- isLoadingIssues: true,
- issuesByListId: {},
- issues: {},
- };
-
- mutations.RECEIVE_ISSUES_FOR_ALL_LISTS_SUCCESS(state, { listData: listIssues, issues });
-
- expect(state.isLoadingIssues).toBe(false);
- expect(state.issuesByListId).toEqual(listIssues);
- expect(state.issues).toEqual(issues);
- });
- });
-
describe('REQUEST_ADD_ISSUE', () => {
expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
});
- describe('RECEIVE_ISSUES_FOR_ALL_LISTS_FAILURE', () => {
- it('sets isLoadingIssues to false and sets error message', () => {
- state = {
- ...state,
- isLoadingIssues: true,
- error: undefined,
- };
-
- mutations.RECEIVE_ISSUES_FOR_ALL_LISTS_FAILURE(state);
-
- expect(state.isLoadingIssues).toBe(false);
- expect(state.error).toEqual(
- 'An error occurred while fetching the board issues. Please reload the page.',
- );
- });
- });
-
describe('UPDATE_ISSUE_BY_ID', () => {
const issueId = '1';
const prop = 'id';
@@ -254,7 +219,6 @@ describe('Board Store Mutations', () => {
beforeEach(() => {
state = {
...state,
- isLoadingIssues: true,
error: undefined,
issues: {
...issue,
diff --git a/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
new file mode 100644
index 00000000000..e07afb5d736
--- /dev/null
+++ b/spec/frontend/ci_settings_pipeline_triggers/components/triggers_list_spec.js
@@ -0,0 +1,102 @@
+import { mount } from '@vue/test-utils';
+import { GlTable, GlBadge } from '@gitlab/ui';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+import TriggersList from '~/ci_settings_pipeline_triggers/components/triggers_list.vue';
+import { triggers } from '../mock_data';
+
+describe('TriggersList', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(TriggersList, {
+ propsData: { triggers, ...props },
+ });
+ };
+
+ const findTable = () => wrapper.find(GlTable);
+ const findHeaderAt = i => wrapper.findAll('thead th').at(i);
+ const findRows = () => wrapper.findAll('tbody tr');
+ const findRowAt = i => findRows().at(i);
+ const findCell = (i, col) =>
+ findRowAt(i)
+ .findAll('td')
+ .at(col);
+ const findClipboardBtn = i => findCell(i, 0).find(ClipboardButton);
+ const findInvalidBadge = i => findCell(i, 0).find(GlBadge);
+ const findEditBtn = i => findRowAt(i).find('[data-testid="edit-btn"]');
+ const findRevokeBtn = i => findRowAt(i).find('[data-testid="trigger_revoke_button"]');
+
+ beforeEach(() => {
+ createComponent();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays a table with expected headers', () => {
+ const headers = ['Token', 'Description', 'Owner', 'Last Used', ''];
+ headers.forEach((header, i) => {
+ expect(findHeaderAt(i).text()).toBe(header);
+ });
+ });
+
+ it('displays a table with rows', () => {
+ expect(findRows()).toHaveLength(triggers.length);
+
+ const [trigger] = triggers;
+
+ expect(findCell(0, 0).text()).toBe(trigger.token);
+ expect(findCell(0, 1).text()).toBe(trigger.description);
+ expect(findCell(0, 2).text()).toContain(trigger.owner.name);
+ });
+
+ it('displays a "copy to clipboard" button for exposed tokens', () => {
+ expect(findClipboardBtn(0).exists()).toBe(true);
+ expect(findClipboardBtn(0).props('text')).toBe(triggers[0].token);
+
+ expect(findClipboardBtn(1).exists()).toBe(false);
+ });
+
+ it('displays an "invalid" label for tokens without access', () => {
+ expect(findInvalidBadge(0).exists()).toBe(false);
+
+ expect(findInvalidBadge(1).exists()).toBe(true);
+ });
+
+ it('displays a time ago label when last used', () => {
+ expect(findCell(0, 3).text()).toBe('Never');
+
+ expect(
+ findCell(1, 3)
+ .find(TimeAgoTooltip)
+ .props('time'),
+ ).toBe(triggers[1].lastUsed);
+ });
+
+ it('displays actions in rows', () => {
+ const [data] = triggers;
+
+ expect(findEditBtn(0).attributes('href')).toBe(data.editProjectTriggerPath);
+
+ expect(findRevokeBtn(0).attributes('href')).toBe(data.projectTriggerPath);
+ expect(findRevokeBtn(0).attributes('data-method')).toBe('delete');
+ expect(findRevokeBtn(0).attributes('data-confirm')).toBeTruthy();
+ });
+
+ describe('when there are no triggers set', () => {
+ beforeEach(() => {
+ createComponent({ triggers: [] });
+ });
+
+ it('does not display a table', () => {
+ expect(findTable().exists()).toBe(false);
+ });
+
+ it('displays a message', () => {
+ expect(wrapper.text()).toBe(
+ 'No triggers have been created yet. Add one using the form above.',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/ci_settings_pipeline_triggers/mock_data.js b/spec/frontend/ci_settings_pipeline_triggers/mock_data.js
new file mode 100644
index 00000000000..6813e941e03
--- /dev/null
+++ b/spec/frontend/ci_settings_pipeline_triggers/mock_data.js
@@ -0,0 +1,30 @@
+export const triggers = [
+ {
+ hasTokenExposed: true,
+ token: '0000',
+ description: 'My trigger',
+ owner: {
+ name: 'My User',
+ username: 'user1',
+ path: '/user1',
+ },
+ lastUsed: null,
+ canAccessProject: true,
+ editProjectTriggerPath: '/triggers/1/edit',
+ projectTriggerPath: '/trigger/1',
+ },
+ {
+ hasTokenExposed: false,
+ token: '1111',
+ description: "Another user's trigger",
+ owner: {
+ name: 'Someone else',
+ username: 'user2',
+ path: '/user2',
+ },
+ lastUsed: '2020-09-10T08:26:47.410Z',
+ canAccessProject: false,
+ editProjectTriggerPath: '/triggers/1/edit',
+ projectTriggerPath: '/trigger/1',
+ },
+];
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index ab32fb12058..5c2d096418d 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
-import { GlButton, GlFormCombobox } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
import createStore from '~/ci_variable_list/store';
@@ -18,7 +18,6 @@ describe('Ci variable modal', () => {
store = createStore();
wrapper = method(CiVariableModal, {
attachToDocument: true,
- provide: { glFeatures: { ciKeyAutocomplete: true } },
stubs: {
GlModal: ModalStub,
},
@@ -42,27 +41,6 @@ describe('Ci variable modal', () => {
wrapper.destroy();
});
- describe('Feature flag', () => {
- describe('when off', () => {
- beforeEach(() => {
- createComponent(shallowMount, { provide: { glFeatures: { ciKeyAutocomplete: false } } });
- });
-
- it('does not render the autocomplete dropdown', () => {
- expect(wrapper.find(GlFormCombobox).exists()).toBe(false);
- });
- });
-
- describe('when on', () => {
- beforeEach(() => {
- createComponent(shallowMount);
- });
- it('renders the autocomplete dropdown', () => {
- expect(wrapper.find(GlFormCombobox).exists()).toBe(true);
- });
- });
- });
-
describe('Basic interactions', () => {
beforeEach(() => {
createComponent(shallowMount);
diff --git a/spec/frontend/clusters/components/fluentd_output_settings_spec.js b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
index c263679a45c..25db8785edc 100644
--- a/spec/frontend/clusters/components/fluentd_output_settings_spec.js
+++ b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlAlert, GlDeprecatedDropdown, GlFormCheckbox } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlFormCheckbox } from '@gitlab/ui';
import FluentdOutputSettings from '~/clusters/components/fluentd_output_settings.vue';
import { APPLICATION_STATUS, FLUENTD } from '~/clusters/constants';
import eventHub from '~/clusters/event_hub';
@@ -36,7 +36,7 @@ describe('FluentdOutputSettings', () => {
};
const findSaveButton = () => wrapper.find({ ref: 'saveBtn' });
const findCancelButton = () => wrapper.find({ ref: 'cancelBtn' });
- const findProtocolDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findProtocolDropdown = () => wrapper.find(GlDropdown);
const findCheckbox = name =>
wrapper.findAll(GlFormCheckbox).wrappers.find(x => x.text() === name);
const findHost = () => wrapper.find('#fluentd-host');
diff --git a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
index 3a9a608b2e2..1f07a0b7908 100644
--- a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
+++ b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlAlert, GlToggle, GlDeprecatedDropdown } from '@gitlab/ui';
+import { GlAlert, GlToggle, GlDropdown } from '@gitlab/ui';
import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
import { APPLICATION_STATUS, INGRESS } from '~/clusters/constants';
import eventHub from '~/clusters/event_hub';
@@ -28,10 +28,12 @@ describe('IngressModsecuritySettings', () => {
});
};
- const findSaveButton = () => wrapper.find('.btn-success');
- const findCancelButton = () => wrapper.find('[variant="secondary"]');
+ const findSaveButton = () =>
+ wrapper.find('[data-qa-selector="save_ingress_modsecurity_settings"]');
+ const findCancelButton = () =>
+ wrapper.find('[data-qa-selector="cancel_ingress_modsecurity_settings"]');
const findModSecurityToggle = () => wrapper.find(GlToggle);
- const findModSecurityDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findModSecurityDropdown = () => wrapper.find(GlDropdown);
describe('when ingress is installed', () => {
beforeEach(() => {
diff --git a/spec/frontend/clusters/components/knative_domain_editor_spec.js b/spec/frontend/clusters/components/knative_domain_editor_spec.js
index 11ebe1b5d61..b7f76211fd6 100644
--- a/spec/frontend/clusters/components/knative_domain_editor_spec.js
+++ b/spec/frontend/clusters/components/knative_domain_editor_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem, GlButton } from '@gitlab/ui';
+import { GlDropdownItem, GlButton } from '@gitlab/ui';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
import { APPLICATION_STATUS } from '~/clusters/constants';
@@ -112,7 +112,7 @@ describe('KnativeDomainEditor', () => {
createComponent({ knative: { ...knative, availableDomains: [newDomain] } });
jest.spyOn(wrapper.vm, 'selectDomain');
- wrapper.find(GlDeprecatedDropdownItem).vm.$emit('click');
+ wrapper.find(GlDropdownItem).vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.selectDomain).toHaveBeenCalledWith(newDomain);
diff --git a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
index 57c538d2650..3e5f8de8e7b 100644
--- a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
+++ b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedDropdownItem, GlIcon } from '@gitlab/ui';
+import { GlDropdownItem, GlIcon } from '@gitlab/ui';
import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
describe('CrossplaneProviderStack component', () => {
@@ -37,7 +37,7 @@ describe('CrossplaneProviderStack component', () => {
createComponent({ crossplane });
});
- const findDropdownElements = () => wrapper.findAll(GlDeprecatedDropdownItem);
+ const findDropdownElements = () => wrapper.findAll(GlDropdownItem);
const findFirstDropdownElement = () => findDropdownElements().at(0);
afterEach(() => {
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index 628c35ae839..34d99473eb7 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -164,18 +164,18 @@ describe('Clusters', () => {
});
it.each`
- nodeSize | lineNumber
- ${'Unknown'} | ${0}
- ${'1'} | ${1}
- ${'2'} | ${2}
- ${'1'} | ${3}
- ${'1'} | ${4}
- ${'Unknown'} | ${5}
- `('renders node size for each cluster', ({ nodeSize, lineNumber }) => {
+ nodeText | lineNumber
+ ${'Unable to Authenticate'} | ${0}
+ ${'1'} | ${1}
+ ${'2'} | ${2}
+ ${'1'} | ${3}
+ ${'1'} | ${4}
+ ${'Unknown Error'} | ${5}
+ `('renders node size for each cluster', ({ nodeText, lineNumber }) => {
const sizes = findTable().findAll('td:nth-child(3)');
const size = sizes.at(lineNumber);
- expect(size.text()).toBe(nodeSize);
+ expect(size.text()).toContain(nodeText);
expect(size.find(GlSkeletonLoading).exists()).toBe(false);
});
});
diff --git a/spec/frontend/clusters_list/components/node_error_help_text_spec.js b/spec/frontend/clusters_list/components/node_error_help_text_spec.js
new file mode 100644
index 00000000000..4d157b3a8ab
--- /dev/null
+++ b/spec/frontend/clusters_list/components/node_error_help_text_spec.js
@@ -0,0 +1,33 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlPopover } from '@gitlab/ui';
+import NodeErrorHelpText from '~/clusters_list/components/node_error_help_text.vue';
+
+describe('NodeErrorHelpText', () => {
+ let wrapper;
+
+ const createWrapper = propsData => {
+ wrapper = shallowMount(NodeErrorHelpText, { propsData, stubs: { GlPopover } });
+ return wrapper.vm.$nextTick();
+ };
+
+ const findPopover = () => wrapper.find(GlPopover);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ errorType | wrapperText | popoverText
+ ${'authentication_error'} | ${'Unable to Authenticate'} | ${'GitLab failed to authenticate'}
+ ${'connection_error'} | ${'Unable to Connect'} | ${'GitLab failed to connect to the cluster'}
+ ${'http_error'} | ${'Unable to Connect'} | ${'There was an HTTP error when connecting to your cluster'}
+ ${'default'} | ${'Unknown Error'} | ${'An unknown error occurred while attempting to connect to Kubernetes.'}
+ ${'unknown_error_type'} | ${'Unknown Error'} | ${'An unknown error occurred while attempting to connect to Kubernetes.'}
+ ${null} | ${'Unknown Error'} | ${'An unknown error occurred while attempting to connect to Kubernetes.'}
+ `('displays error text', ({ errorType, wrapperText, popoverText }) => {
+ return createWrapper({ errorType, popoverId: 'id' }).then(() => {
+ expect(wrapper.text()).toContain(wrapperText);
+ expect(findPopover().text()).toContain(popoverText);
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/mock_data.js b/spec/frontend/clusters_list/mock_data.js
index 48af3b91c94..ed32655d10e 100644
--- a/spec/frontend/clusters_list/mock_data.js
+++ b/spec/frontend/clusters_list/mock_data.js
@@ -6,6 +6,11 @@ export const clusterList = [
provider_type: 'gcp',
status: 'creating',
nodes: null,
+ kubernetes_errors: {
+ connection_error: 'authentication_error',
+ node_connection_error: 'connection_error',
+ metrics_connection_error: 'http_error',
+ },
},
{
name: 'My Cluster 2',
@@ -19,6 +24,7 @@ export const clusterList = [
usage: { cpu: '246155922n', memory: '1255212Ki' },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 3',
@@ -36,6 +42,7 @@ export const clusterList = [
usage: { cpu: '307051934n', memory: '1379136Ki' },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 4',
@@ -48,6 +55,7 @@ export const clusterList = [
usage: { cpu: '1missingCpuUnit', memory: '1missingMemoryUnit' },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 5',
@@ -59,12 +67,14 @@ export const clusterList = [
status: { allocatable: { cpu: '1missingCpuUnit', memory: '1missingMemoryUnit' } },
},
],
+ kubernetes_errors: {},
},
{
name: 'My Cluster 6',
environment_scope: '*',
cluster_type: 'project_type',
status: 'cleanup_ongoing',
+ kubernetes_errors: {},
},
];
diff --git a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
index 745a163951a..62b751ec59b 100644
--- a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
+++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
@@ -56,6 +56,7 @@ exports[`Code navigation popover component renders popover 1`] = `
class="popover-body border-top"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="w-100"
data-testid="go-to-definition-btn"
diff --git a/spec/frontend/commit/pipelines/pipelines_spec.js b/spec/frontend/commit/pipelines/pipelines_spec.js
index fdf3c2e85f3..a196b66daa0 100644
--- a/spec/frontend/commit/pipelines/pipelines_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_spec.js
@@ -21,6 +21,10 @@ describe('Pipelines table in Commits and Merge requests', () => {
preloadFixtures(jsonFixtureName);
+ const findRunPipelineBtn = () => vm.$el.querySelector('[data-testid="run_pipeline_button"]');
+ const findRunPipelineBtnMobile = () =>
+ vm.$el.querySelector('[data-testid="run_pipeline_button_mobile"]');
+
beforeEach(() => {
mock = new MockAdapter(axios);
@@ -131,7 +135,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
- expect(vm.$el.querySelector('.js-run-mr-pipeline')).not.toBeNull();
+ expect(findRunPipelineBtn()).not.toBeNull();
+ expect(findRunPipelineBtnMobile()).not.toBeNull();
done();
});
});
@@ -147,7 +152,8 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
- expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
+ expect(findRunPipelineBtn()).toBeNull();
+ expect(findRunPipelineBtnMobile()).toBeNull();
done();
});
});
@@ -157,7 +163,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- beforeEach(() => {
+ beforeEach(done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
@@ -168,23 +174,46 @@ describe('Pipelines table in Commits and Merge requests', () => {
projectId: '5',
mergeRequestId: 3,
});
- });
- it('updates the loading state', done => {
jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
setImmediate(() => {
- vm.$el.querySelector('.js-run-mr-pipeline').click();
+ done();
+ });
+ });
- vm.$nextTick(() => {
- expect(findModal()).toBeNull();
- expect(vm.state.isRunningMergeRequestPipeline).toBe(true);
+ it('on desktop, shows a loading button', done => {
+ findRunPipelineBtn().click();
- setImmediate(() => {
- expect(vm.state.isRunningMergeRequestPipeline).toBe(false);
+ vm.$nextTick(() => {
+ expect(findModal()).toBeNull();
- done();
- });
+ expect(findRunPipelineBtn().disabled).toBe(true);
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).not.toBeNull();
+
+ setImmediate(() => {
+ expect(findRunPipelineBtn().disabled).toBe(false);
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).toBeNull();
+
+ done();
+ });
+ });
+ });
+
+ it('on mobile, shows a loading button', done => {
+ findRunPipelineBtnMobile().click();
+
+ vm.$nextTick(() => {
+ expect(findModal()).toBeNull();
+
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).not.toBeNull();
+
+ setImmediate(() => {
+ expect(findRunPipelineBtn().disabled).toBe(false);
+ expect(findRunPipelineBtn().querySelector('.gl-spinner')).toBeNull();
+
+ done();
});
});
});
@@ -194,7 +223,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findModal = () =>
document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- beforeEach(() => {
+ beforeEach(done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
@@ -207,18 +236,29 @@ describe('Pipelines table in Commits and Merge requests', () => {
sourceProjectFullPath: 'test/parent-project',
targetProjectFullPath: 'test/fork-project',
});
- });
- it('shows a security warning modal', done => {
jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
setImmediate(() => {
- vm.$el.querySelector('.js-run-mr-pipeline').click();
+ done();
+ });
+ });
- vm.$nextTick(() => {
- expect(findModal()).not.toBeNull();
- done();
- });
+ it('on desktop, shows a security warning modal', done => {
+ findRunPipelineBtn().click();
+
+ vm.$nextTick(() => {
+ expect(findModal()).not.toBeNull();
+ done();
+ });
+ });
+
+ it('on mobile, shows a security warning modal', done => {
+ findRunPipelineBtnMobile().click();
+
+ vm.$nextTick(() => {
+ expect(findModal()).not.toBeNull();
+ done();
});
});
});
diff --git a/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js b/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
index 4bf3ac430f5..e0913fe2e88 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/create_eks_cluster_spec.js
@@ -12,6 +12,7 @@ describe('CreateEksCluster', () => {
let vm;
let state;
const gitlabManagedClusterHelpPath = 'gitlab-managed-cluster-help-path';
+ const namespacePerEnvironmentHelpPath = 'namespace-per-environment-help-path';
const accountAndExternalIdsHelpPath = 'account-and-external-id-help-path';
const createRoleArnHelpPath = 'role-arn-help-path';
const kubernetesIntegrationHelpPath = 'kubernetes-integration';
@@ -26,6 +27,7 @@ describe('CreateEksCluster', () => {
vm = shallowMount(CreateEksCluster, {
propsData: {
gitlabManagedClusterHelpPath,
+ namespacePerEnvironmentHelpPath,
accountAndExternalIdsHelpPath,
createRoleArnHelpPath,
externalLinkIcon,
@@ -53,6 +55,12 @@ describe('CreateEksCluster', () => {
);
});
+ it('help url for namespace per environment cluster documentation', () => {
+ expect(vm.find(EksClusterConfigurationForm).props('namespacePerEnvironmentHelpPath')).toBe(
+ namespacePerEnvironmentHelpPath,
+ );
+ });
+
it('help url for gitlab managed cluster documentation', () => {
expect(vm.find(EksClusterConfigurationForm).props('kubernetesIntegrationHelpPath')).toBe(
kubernetesIntegrationHelpPath,
diff --git a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
index d7dd7072f67..2600415fc9f 100644
--- a/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/components/eks_cluster_configuration_form_spec.js
@@ -169,6 +169,7 @@ describe('EksClusterConfigurationForm', () => {
store,
propsData: {
gitlabManagedClusterHelpPath: '',
+ namespacePerEnvironmentHelpPath: '',
kubernetesIntegrationHelpPath: '',
externalLinkIcon: '',
},
diff --git a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
index ed753888790..f929216689a 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
@@ -14,6 +14,7 @@ import {
SET_ROLE,
SET_SECURITY_GROUP,
SET_GITLAB_MANAGED_CLUSTER,
+ SET_NAMESPACE_PER_ENVIRONMENT,
SET_INSTANCE_TYPE,
SET_NODE_COUNT,
REQUEST_CREATE_ROLE,
@@ -40,6 +41,7 @@ describe('EKS Cluster Store Actions', () => {
let instanceType;
let nodeCount;
let gitlabManagedCluster;
+ let namespacePerEnvironment;
let mock;
let state;
let newClusterUrl;
@@ -57,6 +59,7 @@ describe('EKS Cluster Store Actions', () => {
instanceType = 'small-1';
nodeCount = '5';
gitlabManagedCluster = true;
+ namespacePerEnvironment = true;
newClusterUrl = '/clusters/1';
@@ -76,19 +79,20 @@ describe('EKS Cluster Store Actions', () => {
});
it.each`
- action | mutation | payload | payloadDescription
- ${'setClusterName'} | ${SET_CLUSTER_NAME} | ${{ clusterName }} | ${'cluster name'}
- ${'setEnvironmentScope'} | ${SET_ENVIRONMENT_SCOPE} | ${{ environmentScope }} | ${'environment scope'}
- ${'setKubernetesVersion'} | ${SET_KUBERNETES_VERSION} | ${{ kubernetesVersion }} | ${'kubernetes version'}
- ${'setRole'} | ${SET_ROLE} | ${{ role }} | ${'role'}
- ${'setRegion'} | ${SET_REGION} | ${{ region }} | ${'region'}
- ${'setKeyPair'} | ${SET_KEY_PAIR} | ${{ keyPair }} | ${'key pair'}
- ${'setVpc'} | ${SET_VPC} | ${{ vpc }} | ${'vpc'}
- ${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
- ${'setSecurityGroup'} | ${SET_SECURITY_GROUP} | ${{ securityGroup }} | ${'securityGroup'}
- ${'setInstanceType'} | ${SET_INSTANCE_TYPE} | ${{ instanceType }} | ${'instance type'}
- ${'setNodeCount'} | ${SET_NODE_COUNT} | ${{ nodeCount }} | ${'node count'}
- ${'setGitlabManagedCluster'} | ${SET_GITLAB_MANAGED_CLUSTER} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
+ action | mutation | payload | payloadDescription
+ ${'setClusterName'} | ${SET_CLUSTER_NAME} | ${{ clusterName }} | ${'cluster name'}
+ ${'setEnvironmentScope'} | ${SET_ENVIRONMENT_SCOPE} | ${{ environmentScope }} | ${'environment scope'}
+ ${'setKubernetesVersion'} | ${SET_KUBERNETES_VERSION} | ${{ kubernetesVersion }} | ${'kubernetes version'}
+ ${'setRole'} | ${SET_ROLE} | ${{ role }} | ${'role'}
+ ${'setRegion'} | ${SET_REGION} | ${{ region }} | ${'region'}
+ ${'setKeyPair'} | ${SET_KEY_PAIR} | ${{ keyPair }} | ${'key pair'}
+ ${'setVpc'} | ${SET_VPC} | ${{ vpc }} | ${'vpc'}
+ ${'setSubnet'} | ${SET_SUBNET} | ${{ subnet }} | ${'subnet'}
+ ${'setSecurityGroup'} | ${SET_SECURITY_GROUP} | ${{ securityGroup }} | ${'securityGroup'}
+ ${'setInstanceType'} | ${SET_INSTANCE_TYPE} | ${{ instanceType }} | ${'instance type'}
+ ${'setNodeCount'} | ${SET_NODE_COUNT} | ${{ nodeCount }} | ${'node count'}
+ ${'setGitlabManagedCluster'} | ${SET_GITLAB_MANAGED_CLUSTER} | ${gitlabManagedCluster} | ${'gitlab managed cluster'}
+ ${'setNamespacePerEnvironment'} | ${SET_NAMESPACE_PER_ENVIRONMENT} | ${namespacePerEnvironment} | ${'namespace per environment'}
`(`$action commits $mutation with $payloadDescription payload`, data => {
const { action, mutation, payload } = data;
@@ -179,6 +183,7 @@ describe('EKS Cluster Store Actions', () => {
name: clusterName,
environment_scope: environmentScope,
managed: gitlabManagedCluster,
+ namespace_per_environment: namespacePerEnvironment,
provider_aws_attributes: {
kubernetes_version: kubernetesVersion,
region,
@@ -204,6 +209,7 @@ describe('EKS Cluster Store Actions', () => {
selectedInstanceType: instanceType,
nodeCount,
gitlabManagedCluster,
+ namespacePerEnvironment,
});
});
diff --git a/spec/frontend/design_management/components/design_todo_button_spec.js b/spec/frontend/design_management/components/design_todo_button_spec.js
index 451c23f0fea..9ebc6ca26a2 100644
--- a/spec/frontend/design_management/components/design_todo_button_spec.js
+++ b/spec/frontend/design_management/components/design_todo_button_spec.js
@@ -111,7 +111,7 @@ describe('Design management design todo button', () => {
});
it('renders correct button text', () => {
- expect(wrapper.text()).toBe('Add a To-Do');
+ expect(wrapper.text()).toBe('Add a To Do');
});
describe('when clicked', () => {
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 822df1f6472..de276bd300b 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -24,6 +24,7 @@ exports[`Design management list item component with notes renders item with mult
<img
alt="test"
class="gl-display-block gl-mx-auto gl-max-w-full mh-100 design-img"
+ data-qa-filename="test"
data-qa-selector="design_image"
src=""
/>
@@ -94,6 +95,7 @@ exports[`Design management list item component with notes renders item with sing
<img
alt="test"
class="gl-display-block gl-mx-auto gl-max-w-full mh-100 design-img"
+ data-qa-filename="test"
data-qa-selector="design_image"
src=""
/>
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
index a7d6145285c..5eb86d4f9cb 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
@@ -4,15 +4,16 @@ exports[`Design management pagination component hides components when designs ar
exports[`Design management pagination component renders navigation buttons 1`] = `
<div
- class="d-flex align-items-center"
+ class="gl-display-flex gl-align-items-center"
>
0 of 2
<gl-button-group-stub
- class="ml-3 mr-3"
+ class="gl-mx-5"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-previous-design"
disabled="true"
@@ -23,6 +24,7 @@ exports[`Design management pagination component renders navigation buttons 1`] =
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-next-design"
icon="angle-right"
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
index b286a74ebb8..723ac0491a7 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
@@ -19,16 +19,16 @@ exports[`Design management toolbar component renders design and updated data 1`]
</a>
<div
- class="overflow-hidden d-flex align-items-center"
+ class="gl-overflow-hidden gl-display-flex gl-align-items-center"
>
<h2
- class="m-0 str-truncated-100 gl-font-base"
+ class="gl-m-0 str-truncated-100 gl-font-base"
>
test.jpg
</h2>
<small
- class="text-secondary"
+ class="gl-text-gray-500"
>
Updated 1 hour ago by Test Name
</small>
@@ -36,11 +36,12 @@ exports[`Design management toolbar component renders design and updated data 1`]
</div>
<design-navigation-stub
- class="ml-auto flex-shrink-0"
+ class="gl-ml-auto gl-flex-shrink-0"
id="1"
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
href="/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d"
icon="download"
diff --git a/spec/frontend/design_management/components/toolbar/design_navigation_spec.js b/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
index 1c6588a9628..1d9b9c002f9 100644
--- a/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
+++ b/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
@@ -43,7 +43,7 @@ describe('Design management pagination component', () => {
it('renders navigation buttons', () => {
wrapper.setData({
- designs: [{ id: '1' }, { id: '2' }],
+ designCollection: { designs: [{ id: '1' }, { id: '2' }] },
});
return wrapper.vm.$nextTick().then(() => {
@@ -54,7 +54,7 @@ describe('Design management pagination component', () => {
describe('keyboard buttons navigation', () => {
beforeEach(() => {
wrapper.setData({
- designs: [{ filename: '1' }, { filename: '2' }, { filename: '3' }],
+ designCollection: { designs: [{ filename: '1' }, { filename: '2' }, { filename: '3' }] },
});
});
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
index 3d7939df28e..eaa7460ae15 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
@@ -5,6 +5,7 @@ exports[`Design management upload button component renders inverted upload desig
isinverted="true"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
size="small"
@@ -30,6 +31,7 @@ exports[`Design management upload button component renders inverted upload desig
exports[`Design management upload button component renders loading icon 1`] = `
<div>
<gl-button-stub
+ buttontextclasses=""
category="primary"
disabled="true"
icon=""
@@ -62,6 +64,7 @@ exports[`Design management upload button component renders loading icon 1`] = `
exports[`Design management upload button component renders upload design button 1`] = `
<div>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
size="small"
diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js
index 1c7806c292f..a8b335c2c46 100644
--- a/spec/frontend/design_management/mock_data/apollo_mock.js
+++ b/spec/frontend/design_management/mock_data/apollo_mock.js
@@ -4,6 +4,7 @@ export const designListQueryResponse = {
id: '1',
issue: {
designCollection: {
+ copyState: 'READY',
designs: {
nodes: [
{
diff --git a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
index b80b7fdb43e..7ab2c02c786 100644
--- a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
@@ -19,6 +19,7 @@ exports[`Design management index page designs does not render toolbar when there
>
<design-dropzone-stub
class="design-list-item design-list-item-new"
+ data-qa-selector="design_dropzone_content"
hasdesigns="true"
/>
</li>
@@ -110,6 +111,7 @@ exports[`Design management index page designs renders designs list and header wi
class="qa-selector-toolbar gl-display-flex gl-align-items-center"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="gl-mr-4 js-select-all"
icon=""
@@ -126,6 +128,7 @@ exports[`Design management index page designs renders designs list and header wi
buttonclass="gl-mr-3"
buttonsize="small"
buttonvariant="warning"
+ data-qa-selector="archive_button"
>
Archive selected
@@ -150,6 +153,7 @@ exports[`Design management index page designs renders designs list and header wi
>
<design-dropzone-stub
class="design-list-item design-list-item-new"
+ data-qa-selector="design_dropzone_content"
hasdesigns="true"
/>
</li>
@@ -171,6 +175,8 @@ exports[`Design management index page designs renders designs list and header wi
<input
class="design-checkbox"
+ data-qa-design="design-1-name"
+ data-qa-selector="design_checkbox"
type="checkbox"
/>
</li>
@@ -192,6 +198,8 @@ exports[`Design management index page designs renders designs list and header wi
<input
class="design-checkbox"
+ data-qa-design="design-2-name"
+ data-qa-selector="design_checkbox"
type="checkbox"
/>
</li>
@@ -213,6 +221,8 @@ exports[`Design management index page designs renders designs list and header wi
<input
class="design-checkbox"
+ data-qa-design="design-3-name"
+ data-qa-selector="design_checkbox"
type="checkbox"
/>
</li>
@@ -298,6 +308,7 @@ exports[`Design management index page when has no designs renders design dropzon
>
<design-dropzone-stub
class=""
+ data-qa-selector="design_dropzone_content"
/>
</li>
</ol>
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index c849e4d4ed6..8546f9fbf51 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -67,6 +67,7 @@ exports[`Design management design index page renders design index 1`] = `
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="link-inherit-color gl-text-body gl-text-decoration-none gl-font-weight-bold gl-mb-4"
data-testid="resolved-comments"
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 661717d29a3..55ccb668e81 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -92,6 +92,8 @@ describe('Design management index page', () => {
const findDesignCheckboxes = () => wrapper.findAll('.design-checkbox');
const findSelectAllButton = () => wrapper.find('.js-select-all');
const findToolbar = () => wrapper.find('.qa-selector-toolbar');
+ const findDesignCollectionIsCopying = () =>
+ wrapper.find('[data-testid="design-collection-is-copying"]');
const findDeleteButton = () => wrapper.find(DeleteButton);
const findDropzone = () => wrapper.findAll(DesignDropzone).at(0);
const dropzoneClasses = () => findDropzone().classes();
@@ -99,6 +101,7 @@ describe('Design management index page', () => {
const findFirstDropzoneWithDesign = () => wrapper.findAll(DesignDropzone).at(1);
const findDesignsWrapper = () => wrapper.find('[data-testid="designs-root"]');
const findDesigns = () => wrapper.findAll(Design);
+ const draggableAttributes = () => wrapper.find(VueDraggable).vm.$attrs;
async function moveDesigns(localWrapper) {
await jest.runOnlyPendingTimers();
@@ -115,8 +118,8 @@ describe('Design management index page', () => {
function createComponent({
loading = false,
- designs = [],
allVersions = [],
+ designCollection = { designs: mockDesigns, copyState: 'READY' },
createDesign = true,
stubs = {},
mockMutate = jest.fn().mockResolvedValue(),
@@ -124,7 +127,7 @@ describe('Design management index page', () => {
mutate = mockMutate;
const $apollo = {
queries: {
- designs: {
+ designCollection: {
loading,
},
permissions: {
@@ -137,8 +140,8 @@ describe('Design management index page', () => {
wrapper = shallowMount(Index, {
data() {
return {
- designs,
allVersions,
+ designCollection,
permissions: {
createDesign,
},
@@ -200,13 +203,13 @@ describe('Design management index page', () => {
});
it('renders a toolbar with buttons when there are designs', () => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ createComponent({ allVersions: [mockVersion] });
expect(findToolbar().exists()).toBe(true);
});
it('renders designs list and header with upload button', () => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ createComponent({ allVersions: [mockVersion] });
expect(wrapper.element).toMatchSnapshot();
});
@@ -236,7 +239,7 @@ describe('Design management index page', () => {
describe('when has no designs', () => {
beforeEach(() => {
- createComponent();
+ createComponent({ designCollection: { designs: [], copyState: 'READY' } });
});
it('renders design dropzone', () =>
@@ -259,6 +262,21 @@ describe('Design management index page', () => {
}));
});
+ describe('handling design collection copy state', () => {
+ it.each`
+ copyState | isRendered | description
+ ${'IN_PROGRESS'} | ${true} | ${'renders'}
+ ${'READY'} | ${false} | ${'does not render'}
+ ${'ERROR'} | ${false} | ${'does not render'}
+ `(
+ '$description the copying message if design collection copyState is $copyState',
+ ({ copyState, isRendered }) => {
+ createComponent({ designCollection: { designs: [], copyState } });
+ expect(findDesignCollectionIsCopying().exists()).toBe(isRendered);
+ },
+ );
+ });
+
describe('uploading designs', () => {
it('calls mutation on upload', () => {
createComponent({ stubs: { GlEmptyState } });
@@ -282,6 +300,10 @@ describe('Design management index page', () => {
{
__typename: 'Design',
id: expect.anything(),
+ currentUserTodos: {
+ __typename: 'TodoConnection',
+ nodes: [],
+ },
image: '',
imageV432x230: '',
filename: 'test',
@@ -531,13 +553,16 @@ describe('Design management index page', () => {
});
it('on latest version when has no designs toolbar buttons are invisible', () => {
- createComponent({ designs: [], allVersions: [mockVersion] });
+ createComponent({
+ designCollection: { designs: [], copyState: 'READY' },
+ allVersions: [mockVersion],
+ });
expect(findToolbar().isVisible()).toBe(false);
});
describe('on non-latest version', () => {
beforeEach(() => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ createComponent({ allVersions: [mockVersion] });
});
it('does not render design checkboxes', async () => {
@@ -628,7 +653,6 @@ describe('Design management index page', () => {
it('ensures fullscreen layout is not applied', () => {
createComponent(true);
- wrapper.vm.$router.push('/');
expect(mockPageEl.classList.remove).toHaveBeenCalledTimes(1);
expect(mockPageEl.classList.remove).toHaveBeenCalledWith(...DESIGN_DETAIL_LAYOUT_CLASSLIST);
});
@@ -676,6 +700,20 @@ describe('Design management index page', () => {
).toBe('2');
});
+ it('prevents reordering when reorderDesigns mutation is in progress', async () => {
+ createComponentWithApollo({});
+
+ await moveDesigns(wrapper);
+
+ expect(draggableAttributes().disabled).toBe(true);
+
+ await jest.runOnlyPendingTimers(); // kick off the mocked GQL stuff (promises)
+ await wrapper.vm.$nextTick(); // kick off the DOM update
+ await wrapper.vm.$nextTick(); // kick off the DOM update for finally block
+
+ expect(draggableAttributes().disabled).toBe(false);
+ });
+
it('displays flash if mutation had a recoverable error', async () => {
createComponentWithApollo({
moveHandler: jest.fn().mockResolvedValue(moveDesignMutationResponseWithErrors),
diff --git a/spec/frontend/design_management/router_spec.js b/spec/frontend/design_management/router_spec.js
index d4cb9f75a77..fac4f7d368d 100644
--- a/spec/frontend/design_management/router_spec.js
+++ b/spec/frontend/design_management/router_spec.js
@@ -25,7 +25,7 @@ function factory(routeArg) {
mocks: {
$apollo: {
queries: {
- designs: { loading: true },
+ designCollection: { loading: true },
design: { loading: true },
permissions: { loading: true },
},
diff --git a/spec/frontend/design_management/utils/design_management_utils_spec.js b/spec/frontend/design_management/utils/design_management_utils_spec.js
index 7e857d08d25..232cfa2f4ca 100644
--- a/spec/frontend/design_management/utils/design_management_utils_spec.js
+++ b/spec/frontend/design_management/utils/design_management_utils_spec.js
@@ -93,6 +93,10 @@ describe('optimistic responses', () => {
fullPath: '',
notesCount: 0,
event: 'NONE',
+ currentUserTodos: {
+ __typename: 'TodoConnection',
+ nodes: [],
+ },
diffRefs: { __typename: 'DiffRefs', baseSha: '', startSha: '', headSha: '' },
discussions: { __typename: 'DesignDiscussion', nodes: [] },
versions: {
diff --git a/spec/frontend/diff_comments_store_spec.js b/spec/frontend/diff_comments_store_spec.js
deleted file mode 100644
index 6f25c9dd3bc..00000000000
--- a/spec/frontend/diff_comments_store_spec.js
+++ /dev/null
@@ -1,136 +0,0 @@
-/* global CommentsStore */
-
-import '~/diff_notes/models/discussion';
-import '~/diff_notes/models/note';
-import '~/diff_notes/stores/comments';
-
-function createDiscussion(noteId = 1, resolved = true) {
- CommentsStore.create({
- discussionId: 'a',
- noteId,
- canResolve: true,
- resolved,
- resolvedBy: 'test',
- authorName: 'test',
- authorAvatar: 'test',
- noteTruncated: 'test...',
- });
-}
-
-beforeEach(() => {
- CommentsStore.state = {};
-});
-
-describe('New discussion', () => {
- it('creates new discussion', () => {
- expect(Object.keys(CommentsStore.state).length).toBe(0);
- createDiscussion();
-
- expect(Object.keys(CommentsStore.state).length).toBe(1);
- });
-
- it('creates new note in discussion', () => {
- createDiscussion();
- createDiscussion(2);
-
- const discussion = CommentsStore.state.a;
-
- expect(Object.keys(discussion.notes).length).toBe(2);
- });
-});
-
-describe('Get note', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('gets note by ID', () => {
- const note = CommentsStore.get('a', 1);
-
- expect(note).toBeDefined();
- expect(note.id).toBe(1);
- });
-});
-
-describe('Delete discussion', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('deletes discussion by ID', () => {
- CommentsStore.delete('a', 1);
-
- expect(Object.keys(CommentsStore.state).length).toBe(0);
- });
-
- it('deletes discussion when no more notes', () => {
- createDiscussion();
- createDiscussion(2);
-
- expect(Object.keys(CommentsStore.state).length).toBe(1);
- expect(Object.keys(CommentsStore.state.a.notes).length).toBe(2);
-
- CommentsStore.delete('a', 1);
- CommentsStore.delete('a', 2);
-
- expect(Object.keys(CommentsStore.state).length).toBe(0);
- });
-});
-
-describe('Update note', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('updates note to be unresolved', () => {
- CommentsStore.update('a', 1, false, 'test');
-
- const note = CommentsStore.get('a', 1);
-
- expect(note.resolved).toBe(false);
- });
-});
-
-describe('Discussion resolved', () => {
- beforeEach(() => {
- createDiscussion();
- });
-
- it('is resolved with single note', () => {
- const discussion = CommentsStore.state.a;
-
- expect(discussion.isResolved()).toBe(true);
- });
-
- it('is unresolved with 2 notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2, false);
-
- expect(discussion.isResolved()).toBe(false);
- });
-
- it('is resolved with 2 notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2);
-
- expect(discussion.isResolved()).toBe(true);
- });
-
- it('resolve all notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2, false);
-
- discussion.resolveAllNotes();
-
- expect(discussion.isResolved()).toBe(true);
- });
-
- it('unresolve all notes', () => {
- const discussion = CommentsStore.state.a;
- createDiscussion(2);
-
- discussion.unResolveAllNotes();
-
- expect(discussion.isResolved()).toBe(false);
- });
-});
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index cd3a6aa0e28..86560470ada 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -699,7 +699,7 @@ describe('diffs/components/app', () => {
describe('collapsed files', () => {
it('should render the collapsed files warning if there are any collapsed files', () => {
createComponent({}, ({ state }) => {
- state.diffs.diffFiles = [{ viewer: { collapsed: true } }];
+ state.diffs.diffFiles = [{ viewer: { automaticallyCollapsed: true } }];
});
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(true);
@@ -707,7 +707,7 @@ describe('diffs/components/app', () => {
it('should not render the collapsed files warning if the user has dismissed the alert already', async () => {
createComponent({}, ({ state }) => {
- state.diffs.diffFiles = [{ viewer: { collapsed: true } }];
+ state.diffs.diffFiles = [{ viewer: { automaticallyCollapsed: true } }];
});
expect(getCollapsedFilesWarning(wrapper).exists()).toBe(true);
diff --git a/spec/frontend/diffs/components/collapsed_files_warning_spec.js b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
index 670eab5472f..7bbffb7a1cd 100644
--- a/spec/frontend/diffs/components/collapsed_files_warning_spec.js
+++ b/spec/frontend/diffs/components/collapsed_files_warning_spec.js
@@ -50,7 +50,7 @@ describe('CollapsedFilesWarning', () => {
({ limited, containerClasses }) => {
createComponent({ limited });
- expect(wrapper.classes()).toEqual(containerClasses);
+ expect(wrapper.classes()).toEqual(['col-12'].concat(containerClasses));
},
);
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index c48445790f7..9e4fcddd1b4 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -25,7 +25,7 @@ describe('diffs/components/commit_item', () => {
const getTitleElement = () => wrapper.find('.commit-row-message.item-title');
const getDescElement = () => wrapper.find('pre.commit-row-description');
const getDescExpandElement = () => wrapper.find('.commit-content .js-toggle-button');
- const getShaElement = () => wrapper.find('.commit-sha-group');
+ const getShaElement = () => wrapper.find('[data-testid="commit-sha-group"]');
const getAvatarElement = () => wrapper.find('.user-avatar-link');
const getCommitterElement = () => wrapper.find('.committer');
const getCommitActionsElement = () => wrapper.find('.commit-actions');
@@ -84,8 +84,8 @@ describe('diffs/components/commit_item', () => {
it('renders commit sha', () => {
const shaElement = getShaElement();
- const labelElement = shaElement.find('.label');
- const buttonElement = shaElement.find('button');
+ const labelElement = shaElement.find('[data-testid="commit-sha-group"] button');
+ const buttonElement = shaElement.find('button.input-group-text');
expect(labelElement.text()).toEqual(commit.short_id);
expect(buttonElement.props('text')).toBe(commit.id);
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index a0cad32b9fb..3a236228c40 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -1,9 +1,7 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { GlIcon } from '@gitlab/ui';
import { cloneDeep } from 'lodash';
import DiffFileHeader from '~/diffs/components/diff_file_header.vue';
-import EditButton from '~/diffs/components/edit_button.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import diffDiscussionsMockData from '../mock_data/diff_discussions';
import { truncateSha } from '~/lib/utils/text_utility';
@@ -76,15 +74,7 @@ describe('DiffFileHeader component', () => {
const findReplacedFileButton = () => wrapper.find({ ref: 'replacedFileButton' });
const findViewFileButton = () => wrapper.find({ ref: 'viewButton' });
const findCollapseIcon = () => wrapper.find({ ref: 'collapseIcon' });
-
- const findIconByName = iconName => {
- const icons = wrapper.findAll(GlIcon).filter(w => w.props('name') === iconName);
- if (icons.length === 0) return icons;
- if (icons.length > 1) {
- throw new Error(`Multiple icons found for ${iconName}`);
- }
- return icons.at(0);
- };
+ const findEditButton = () => wrapper.find({ ref: 'editButton' });
const createComponent = props => {
mockStoreConfig = cloneDeep(defaultMockStoreConfig);
@@ -203,16 +193,6 @@ describe('DiffFileHeader component', () => {
describe('for any file', () => {
const otherModes = Object.keys(diffViewerModes).filter(m => m !== 'mode_changed');
- it('when edit button emits showForkMessage event it is re-emitted', () => {
- createComponent({
- addMergeRequestButtons: true,
- });
- wrapper.find(EditButton).vm.$emit('showForkMessage');
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted().showForkMessage).toBeDefined();
- });
- });
-
it('for mode_changed file mode displays mode changes', () => {
createComponent({
diffFile: {
@@ -271,16 +251,16 @@ describe('DiffFileHeader component', () => {
});
it('should not render edit button', () => {
createComponent({ addMergeRequestButtons: false });
- expect(wrapper.find(EditButton).exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
});
});
describe('when addMergeRequestButtons is true', () => {
describe('without discussions', () => {
- it('renders a disabled toggle discussions button', () => {
+ it('does not render a toggle discussions button', () => {
diffHasDiscussionsResultMock.mockReturnValue(false);
createComponent({ addMergeRequestButtons: true });
- expect(findToggleDiscussionsButton().attributes('disabled')).toBe('true');
+ expect(findToggleDiscussionsButton().exists()).toBe(false);
});
});
@@ -288,7 +268,7 @@ describe('DiffFileHeader component', () => {
it('dispatches toggleFileDiscussionWrappers when user clicks on toggle discussions button', () => {
diffHasDiscussionsResultMock.mockReturnValue(true);
createComponent({ addMergeRequestButtons: true });
- expect(findToggleDiscussionsButton().attributes('disabled')).toBeFalsy();
+ expect(findToggleDiscussionsButton().exists()).toBe(true);
findToggleDiscussionsButton().vm.$emit('click');
expect(
mockStoreConfig.modules.diffs.actions.toggleFileDiscussionWrappers,
@@ -300,7 +280,7 @@ describe('DiffFileHeader component', () => {
createComponent({
addMergeRequestButtons: true,
});
- expect(wrapper.find(EditButton).exists()).toBe(true);
+ expect(findEditButton().exists()).toBe(true);
});
describe('view on environment button', () => {
@@ -334,7 +314,7 @@ describe('DiffFileHeader component', () => {
});
it('should not render edit button', () => {
- expect(wrapper.find(EditButton).exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
});
});
describe('with file blob', () => {
@@ -345,7 +325,7 @@ describe('DiffFileHeader component', () => {
addMergeRequestButtons: true,
});
expect(findViewFileButton().attributes('href')).toBe(viewPath);
- expect(findViewFileButton().attributes('title')).toEqual(
+ expect(findViewFileButton().text()).toEqual(
`View file @ ${diffFile.content_sha.substr(0, 8)}`,
);
});
@@ -375,21 +355,6 @@ describe('DiffFileHeader component', () => {
addMergeRequestButtons: true,
};
- it.each`
- iconName | isShowingFullFile
- ${'doc-expand'} | ${false}
- ${'doc-changes'} | ${true}
- `(
- 'shows $iconName when isShowingFullFile set to $isShowingFullFile',
- ({ iconName, isShowingFullFile }) => {
- createComponent({
- ...fullyNotExpandedFileProps,
- diffFile: { ...fullyNotExpandedFileProps.diffFile, isShowingFullFile },
- });
- expect(findIconByName(iconName).exists()).toBe(true);
- },
- );
-
it('renders expand to full file button if not showing full file already', () => {
createComponent(fullyNotExpandedFileProps);
expect(findExpandButton().exists()).toBe(true);
@@ -455,7 +420,7 @@ describe('DiffFileHeader component', () => {
it('does not show edit button', () => {
createComponent({ diffFile: { ...diffFile, deleted_file: true } });
- expect(wrapper.find(EditButton).exists()).toBe(false);
+ expect(findEditButton().exists()).toBe(false);
});
});
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 79f0f6bc327..a6f0d2bf11d 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -37,7 +37,7 @@ describe('DiffFile', () => {
expect(el.querySelectorAll('.diff-content.hidden').length).toEqual(0);
expect(el.querySelector('.js-file-title')).toBeDefined();
- expect(el.querySelector('.btn-clipboard')).toBeDefined();
+ expect(el.querySelector('[data-testid="diff-file-copy-clipboard"]')).toBeDefined();
expect(el.querySelector('.file-title-name').innerText.indexOf(file_path)).toBeGreaterThan(-1);
expect(el.querySelector('.js-syntax-highlight')).toBeDefined();
@@ -47,7 +47,7 @@ describe('DiffFile', () => {
.then(() => {
expect(el.querySelectorAll('.line_content').length).toBe(8);
expect(el.querySelectorAll('.js-line-expansion-content').length).toBe(1);
- triggerEvent('.btn-clipboard');
+ triggerEvent('[data-testid="diff-file-copy-clipboard"]');
})
.then(done)
.catch(done.fail);
@@ -56,11 +56,11 @@ describe('DiffFile', () => {
it('should track a click event on copy to clip board button', done => {
const el = vm.$el;
- expect(el.querySelector('.btn-clipboard')).toBeDefined();
+ expect(el.querySelector('[data-testid="diff-file-copy-clipboard"]')).toBeDefined();
vm.file.renderIt = true;
vm.$nextTick()
.then(() => {
- triggerEvent('.btn-clipboard');
+ triggerEvent('[data-testid="diff-file-copy-clipboard"]');
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_copy_file_button', {
label: 'diff_copy_file_path_button',
@@ -90,8 +90,8 @@ describe('DiffFile', () => {
vm.isCollapsed = true;
vm.$nextTick(() => {
- expect(vm.$el.innerText).toContain('This file is collapsed.');
- expect(vm.$el.querySelector('[data-testid="expandButton"]')).not.toBeFalsy();
+ expect(vm.$el.innerText).toContain('This diff is collapsed');
+ expect(vm.$el.querySelectorAll('.js-click-to-expand').length).toEqual(1);
done();
});
@@ -102,8 +102,8 @@ describe('DiffFile', () => {
vm.isCollapsed = true;
vm.$nextTick(() => {
- expect(vm.$el.innerText).toContain('This file is collapsed.');
- expect(vm.$el.querySelector('[data-testid="expandButton"]')).not.toBeFalsy();
+ expect(vm.$el.innerText).toContain('This diff is collapsed');
+ expect(vm.$el.querySelectorAll('.js-click-to-expand').length).toEqual(1);
done();
});
@@ -121,8 +121,8 @@ describe('DiffFile', () => {
vm.isCollapsed = true;
vm.$nextTick(() => {
- expect(vm.$el.innerText).toContain('This file is collapsed.');
- expect(vm.$el.querySelector('[data-testid="expandButton"]')).not.toBeFalsy();
+ expect(vm.$el.innerText).toContain('This diff is collapsed');
+ expect(vm.$el.querySelectorAll('.js-click-to-expand').length).toEqual(1);
done();
});
@@ -135,7 +135,7 @@ describe('DiffFile', () => {
vm.file.viewer.name = diffViewerModes.renamed;
vm.$nextTick(() => {
- expect(vm.$el.innerText).not.toContain('This file is collapsed.');
+ expect(vm.$el.innerText).not.toContain('This diff is collapsed');
done();
});
@@ -148,7 +148,7 @@ describe('DiffFile', () => {
vm.file.viewer.name = diffViewerModes.mode_changed;
vm.$nextTick(() => {
- expect(vm.$el.innerText).not.toContain('This file is collapsed.');
+ expect(vm.$el.innerText).not.toContain('This diff is collapsed');
done();
});
@@ -181,7 +181,7 @@ describe('DiffFile', () => {
});
it('updates local state when changing file state', done => {
- vm.file.viewer.collapsed = true;
+ vm.file.viewer.automaticallyCollapsed = true;
vm.$nextTick(() => {
expect(vm.isCollapsed).toBe(true);
diff --git a/spec/frontend/diffs/components/diff_row_utils_spec.js b/spec/frontend/diffs/components/diff_row_utils_spec.js
new file mode 100644
index 00000000000..394b6cb1914
--- /dev/null
+++ b/spec/frontend/diffs/components/diff_row_utils_spec.js
@@ -0,0 +1,203 @@
+import * as utils from '~/diffs/components/diff_row_utils';
+import {
+ MATCH_LINE_TYPE,
+ CONTEXT_LINE_TYPE,
+ OLD_NO_NEW_LINE_TYPE,
+ NEW_NO_NEW_LINE_TYPE,
+ EMPTY_CELL_TYPE,
+} from '~/diffs/constants';
+
+const LINE_CODE = 'abc123';
+
+describe('isHighlighted', () => {
+ it('should return true if line is highlighted', () => {
+ const state = { diffs: { highlightedRow: LINE_CODE } };
+ const line = { line_code: LINE_CODE };
+ const isCommented = false;
+ expect(utils.isHighlighted(state, line, isCommented)).toBe(true);
+ });
+
+ it('should return false if line is not highlighted', () => {
+ const state = { diffs: { highlightedRow: 'xxx' } };
+ const line = { line_code: LINE_CODE };
+ const isCommented = false;
+ expect(utils.isHighlighted(state, line, isCommented)).toBe(false);
+ });
+
+ it('should return true if isCommented is true', () => {
+ const state = { diffs: { highlightedRow: 'xxx' } };
+ const line = { line_code: LINE_CODE };
+ const isCommented = true;
+ expect(utils.isHighlighted(state, line, isCommented)).toBe(true);
+ });
+});
+
+describe('isContextLine', () => {
+ it('return true if line type is context', () => {
+ expect(utils.isContextLine(CONTEXT_LINE_TYPE)).toBe(true);
+ });
+
+ it('return false if line type is not context', () => {
+ expect(utils.isContextLine('xxx')).toBe(false);
+ });
+});
+
+describe('isMatchLine', () => {
+ it('return true if line type is match', () => {
+ expect(utils.isMatchLine(MATCH_LINE_TYPE)).toBe(true);
+ });
+
+ it('return false if line type is not match', () => {
+ expect(utils.isMatchLine('xxx')).toBe(false);
+ });
+});
+
+describe('isMetaLine', () => {
+ it.each`
+ type | expectation
+ ${OLD_NO_NEW_LINE_TYPE} | ${true}
+ ${NEW_NO_NEW_LINE_TYPE} | ${true}
+ ${EMPTY_CELL_TYPE} | ${true}
+ ${'xxx'} | ${false}
+ `('should return $expectation if type is $type', ({ type, expectation }) => {
+ expect(utils.isMetaLine(type)).toBe(expectation);
+ });
+});
+
+describe('shouldRenderCommentButton', () => {
+ it('should return false if comment button is not rendered', () => {
+ expect(utils.shouldRenderCommentButton(true, false)).toBe(false);
+ });
+
+ it('should return false if not logged in', () => {
+ expect(utils.shouldRenderCommentButton(false, true)).toBe(false);
+ });
+
+ it('should return true if logged in and rendered', () => {
+ expect(utils.shouldRenderCommentButton(true, true)).toBe(true);
+ });
+});
+
+describe('hasDiscussions', () => {
+ it('should return false if line is undefined', () => {
+ expect(utils.hasDiscussions()).toBe(false);
+ });
+
+ it('should return false if discussions is undefined', () => {
+ expect(utils.hasDiscussions({})).toBe(false);
+ });
+
+ it('should return false if discussions has length of 0', () => {
+ expect(utils.hasDiscussions({ discussions: [] })).toBe(false);
+ });
+
+ it('should return true if discussions has length > 0', () => {
+ expect(utils.hasDiscussions({ discussions: [1] })).toBe(true);
+ });
+});
+
+describe('lineHref', () => {
+ it(`should return #${LINE_CODE}`, () => {
+ expect(utils.lineHref({ line_code: LINE_CODE })).toEqual(`#${LINE_CODE}`);
+ });
+
+ it(`should return '#' if line is undefined`, () => {
+ expect(utils.lineHref()).toEqual('#');
+ });
+
+ it(`should return '#' if line_code is undefined`, () => {
+ expect(utils.lineHref({})).toEqual('#');
+ });
+});
+
+describe('lineCode', () => {
+ it(`should return undefined if line_code is undefined`, () => {
+ expect(utils.lineCode()).toEqual(undefined);
+ expect(utils.lineCode({ left: {} })).toEqual(undefined);
+ expect(utils.lineCode({ right: {} })).toEqual(undefined);
+ });
+
+ it(`should return ${LINE_CODE}`, () => {
+ expect(utils.lineCode({ line_code: LINE_CODE })).toEqual(LINE_CODE);
+ expect(utils.lineCode({ left: { line_code: LINE_CODE } })).toEqual(LINE_CODE);
+ expect(utils.lineCode({ right: { line_code: LINE_CODE } })).toEqual(LINE_CODE);
+ });
+});
+
+describe('classNameMapCell', () => {
+ it.each`
+ line | hll | loggedIn | hovered | expectation
+ ${undefined} | ${true} | ${true} | ${true} | ${[]}
+ ${{ type: 'new' }} | ${false} | ${false} | ${false} | ${['new', { hll: false, 'is-over': false }]}
+ ${{ type: 'new' }} | ${true} | ${true} | ${false} | ${['new', { hll: true, 'is-over': false }]}
+ ${{ type: 'new' }} | ${true} | ${false} | ${true} | ${['new', { hll: true, 'is-over': false }]}
+ ${{ type: 'new' }} | ${true} | ${true} | ${true} | ${['new', { hll: true, 'is-over': true }]}
+ `('should return $expectation', ({ line, hll, loggedIn, hovered, expectation }) => {
+ const classes = utils.classNameMapCell(line, hll, loggedIn, hovered);
+ expect(classes).toEqual(expectation);
+ });
+});
+
+describe('addCommentTooltip', () => {
+ const brokenSymLinkTooltip =
+ 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
+ const brokenRealTooltip =
+ 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
+ it('should return default tooltip', () => {
+ expect(utils.addCommentTooltip()).toBeUndefined();
+ });
+
+ it('should return broken symlink tooltip', () => {
+ expect(utils.addCommentTooltip({ commentsDisabled: { wasSymbolic: true } })).toEqual(
+ brokenSymLinkTooltip,
+ );
+ expect(utils.addCommentTooltip({ commentsDisabled: { isSymbolic: true } })).toEqual(
+ brokenSymLinkTooltip,
+ );
+ });
+
+ it('should return broken real tooltip', () => {
+ expect(utils.addCommentTooltip({ commentsDisabled: { wasReal: true } })).toEqual(
+ brokenRealTooltip,
+ );
+ expect(utils.addCommentTooltip({ commentsDisabled: { isReal: true } })).toEqual(
+ brokenRealTooltip,
+ );
+ });
+});
+
+describe('parallelViewLeftLineType', () => {
+ it(`should return ${OLD_NO_NEW_LINE_TYPE}`, () => {
+ expect(utils.parallelViewLeftLineType({ right: { type: NEW_NO_NEW_LINE_TYPE } })).toEqual(
+ OLD_NO_NEW_LINE_TYPE,
+ );
+ });
+
+ it(`should return 'new'`, () => {
+ expect(utils.parallelViewLeftLineType({ left: { type: 'new' } })).toContain('new');
+ });
+
+ it(`should return ${EMPTY_CELL_TYPE}`, () => {
+ expect(utils.parallelViewLeftLineType({})).toContain(EMPTY_CELL_TYPE);
+ });
+
+ it(`should return hll:true`, () => {
+ expect(utils.parallelViewLeftLineType({}, true)[1]).toEqual({ hll: true });
+ });
+});
+
+describe('shouldShowCommentButton', () => {
+ it.each`
+ hover | context | meta | discussions | expectation
+ ${true} | ${false} | ${false} | ${false} | ${true}
+ ${false} | ${false} | ${false} | ${false} | ${false}
+ ${true} | ${true} | ${false} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${true} | ${false}
+ `(
+ 'should return $expectation when hover is $hover',
+ ({ hover, context, meta, discussions, expectation }) => {
+ expect(utils.shouldShowCommentButton(hover, context, meta, discussions)).toBe(expectation);
+ },
+ );
+});
diff --git a/spec/frontend/diffs/components/diff_table_cell_spec.js b/spec/frontend/diffs/components/diff_table_cell_spec.js
deleted file mode 100644
index 02f5c27eecb..00000000000
--- a/spec/frontend/diffs/components/diff_table_cell_spec.js
+++ /dev/null
@@ -1,279 +0,0 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import Vuex from 'vuex';
-import { TEST_HOST } from 'helpers/test_constants';
-import DiffTableCell from '~/diffs/components/diff_table_cell.vue';
-import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
-import { LINE_POSITION_RIGHT } from '~/diffs/constants';
-import { createStore } from '~/mr_notes/stores';
-import discussionsMockData from '../mock_data/diff_discussions';
-import diffFileMockData from '../mock_data/diff_file';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-const TEST_USER_ID = 'abc123';
-const TEST_USER = { id: TEST_USER_ID };
-const TEST_LINE_NUMBER = 1;
-const TEST_LINE_CODE = 'LC_42';
-const TEST_FILE_HASH = diffFileMockData.file_hash;
-
-describe('DiffTableCell', () => {
- const symlinkishFileTooltip =
- 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
- const realishFileTooltip =
- 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
- const otherFileTooltip = 'Add a comment to this line';
-
- let wrapper;
- let line;
- let store;
-
- beforeEach(() => {
- store = createStore();
- store.state.notes.userData = TEST_USER;
-
- line = {
- line_code: TEST_LINE_CODE,
- type: 'new',
- old_line: null,
- new_line: 1,
- discussions: [{ ...discussionsMockData }],
- discussionsExpanded: true,
- text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- rich_text: '+<span id="LC1" class="line" lang="plaintext"> - Bad dates</span>\n',
- meta_data: null,
- };
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- const setWindowLocation = value => {
- Object.defineProperty(window, 'location', {
- writable: true,
- value,
- });
- };
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(DiffTableCell, {
- localVue,
- store,
- propsData: {
- line,
- fileHash: TEST_FILE_HASH,
- contextLinesPath: '/context/lines/path',
- isHighlighted: false,
- ...props,
- },
- });
- };
-
- const findTd = () => wrapper.find({ ref: 'td' });
- const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButton' });
- const findLineNumber = () => wrapper.find({ ref: 'lineNumberRef' });
- const findTooltip = () => wrapper.find({ ref: 'addNoteTooltip' });
- const findAvatars = () => wrapper.find(DiffGutterAvatars);
-
- describe('td', () => {
- it('highlights when isHighlighted true', () => {
- createComponent({ isHighlighted: true });
-
- expect(findTd().classes()).toContain('hll');
- });
-
- it('does not highlight when isHighlighted false', () => {
- createComponent({ isHighlighted: false });
-
- expect(findTd().classes()).not.toContain('hll');
- });
- });
-
- describe('comment button', () => {
- it.each`
- showCommentButton | userData | query | mergeRefHeadComments | expectation
- ${true} | ${TEST_USER} | ${'diff_head=false'} | ${false} | ${true}
- ${true} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${true}
- ${true} | ${TEST_USER} | ${'diff_head=true'} | ${false} | ${false}
- ${false} | ${TEST_USER} | ${'diff_head=true'} | ${true} | ${false}
- ${false} | ${TEST_USER} | ${'bogus'} | ${true} | ${false}
- ${true} | ${null} | ${''} | ${true} | ${false}
- `(
- 'exists is $expectation - with showCommentButton ($showCommentButton) userData ($userData) query ($query)',
- ({ showCommentButton, userData, query, mergeRefHeadComments, expectation }) => {
- store.state.notes.userData = userData;
- gon.features = { mergeRefHeadComments };
- setWindowLocation({ href: `${TEST_HOST}?${query}` });
- createComponent({ showCommentButton });
-
- wrapper.setData({ isCommentButtonRendered: showCommentButton });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().exists()).toBe(expectation);
- });
- },
- );
-
- it.each`
- isHover | otherProps | discussions | expectation
- ${true} | ${{}} | ${[]} | ${true}
- ${false} | ${{}} | ${[]} | ${false}
- ${true} | ${{ line: { ...line, type: 'context' } }} | ${[]} | ${false}
- ${true} | ${{ line: { ...line, type: 'old-nonewline' } }} | ${[]} | ${false}
- ${true} | ${{}} | ${[{}]} | ${false}
- `(
- 'visible is $expectation - with isHover ($isHover), discussions ($discussions), otherProps ($otherProps)',
- ({ isHover, otherProps, discussions, expectation }) => {
- line.discussions = discussions;
- createComponent({
- showCommentButton: true,
- isHover,
- ...otherProps,
- });
-
- wrapper.setData({
- isCommentButtonRendered: true,
- });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().isVisible()).toBe(expectation);
- });
- },
- );
-
- it.each`
- disabled | commentsDisabled
- ${'disabled'} | ${true}
- ${undefined} | ${false}
- `(
- 'has attribute disabled=$disabled when the outer component has prop commentsDisabled=$commentsDisabled',
- ({ disabled, commentsDisabled }) => {
- line.commentsDisabled = commentsDisabled;
-
- createComponent({
- showCommentButton: true,
- isHover: true,
- });
-
- wrapper.setData({ isCommentButtonRendered: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findNoteButton().attributes('disabled')).toBe(disabled);
- });
- },
- );
-
- it.each`
- tooltip | commentsDisabled
- ${symlinkishFileTooltip} | ${{ wasSymbolic: true }}
- ${symlinkishFileTooltip} | ${{ isSymbolic: true }}
- ${realishFileTooltip} | ${{ wasReal: true }}
- ${realishFileTooltip} | ${{ isReal: true }}
- ${otherFileTooltip} | ${false}
- `(
- 'has the correct tooltip when commentsDisabled=$commentsDisabled',
- ({ tooltip, commentsDisabled }) => {
- line.commentsDisabled = commentsDisabled;
-
- createComponent({
- showCommentButton: true,
- isHover: true,
- });
-
- wrapper.setData({ isCommentButtonRendered: true });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findTooltip().attributes('title')).toBe(tooltip);
- });
- },
- );
- });
-
- describe('line number', () => {
- describe('without lineNumber prop', () => {
- it('does not render', () => {
- createComponent({ lineType: 'old' });
-
- expect(findLineNumber().exists()).toBe(false);
- });
- });
-
- describe('with lineNumber prop', () => {
- describe.each`
- lineProps | expectedHref | expectedClickArg
- ${{ line_code: TEST_LINE_CODE }} | ${`#${TEST_LINE_CODE}`} | ${TEST_LINE_CODE}
- ${{ line_code: undefined }} | ${'#'} | ${undefined}
- ${{ line_code: undefined, left: { line_code: TEST_LINE_CODE } }} | ${'#'} | ${TEST_LINE_CODE}
- ${{ line_code: undefined, right: { line_code: TEST_LINE_CODE } }} | ${'#'} | ${TEST_LINE_CODE}
- `('with line ($lineProps)', ({ lineProps, expectedHref, expectedClickArg }) => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
- Object.assign(line, lineProps);
- createComponent({ lineNumber: TEST_LINE_NUMBER });
- });
-
- it('renders', () => {
- expect(findLineNumber().exists()).toBe(true);
- expect(findLineNumber().attributes()).toEqual({
- href: expectedHref,
- 'data-linenumber': TEST_LINE_NUMBER.toString(),
- });
- });
-
- it('on click, dispatches setHighlightedRow', () => {
- expect(store.dispatch).not.toHaveBeenCalled();
-
- findLineNumber().trigger('click');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/setHighlightedRow', expectedClickArg);
- });
- });
- });
- });
-
- describe('diff-gutter-avatars', () => {
- describe('with showCommentButton', () => {
- beforeEach(() => {
- jest.spyOn(store, 'dispatch').mockImplementation();
-
- createComponent({ showCommentButton: true });
- });
-
- it('renders', () => {
- expect(findAvatars().props()).toEqual({
- discussions: line.discussions,
- discussionsExpanded: line.discussionsExpanded,
- });
- });
-
- it('toggles line discussion', () => {
- expect(store.dispatch).not.toHaveBeenCalled();
-
- findAvatars().vm.$emit('toggleLineDiscussions');
-
- expect(store.dispatch).toHaveBeenCalledWith('diffs/toggleLineDiscussions', {
- lineCode: TEST_LINE_CODE,
- fileHash: TEST_FILE_HASH,
- expanded: !line.discussionsExpanded,
- });
- });
- });
-
- it.each`
- props | lineProps | expectation
- ${{ showCommentButton: true }} | ${{}} | ${true}
- ${{ showCommentButton: false }} | ${{}} | ${false}
- ${{ showCommentButton: true, linePosition: LINE_POSITION_RIGHT }} | ${{ type: null }} | ${false}
- ${{ showCommentButton: true }} | ${{ discussions: [] }} | ${false}
- `(
- 'exists is $expectation - with props ($props), line ($lineProps)',
- ({ props, lineProps, expectation }) => {
- Object.assign(line, lineProps);
- createComponent(props);
-
- expect(findAvatars().exists()).toBe(expectation);
- },
- );
- });
-});
diff --git a/spec/frontend/diffs/components/edit_button_spec.js b/spec/frontend/diffs/components/edit_button_spec.js
deleted file mode 100644
index 71512c1c4af..00000000000
--- a/spec/frontend/diffs/components/edit_button_spec.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
-import EditButton from '~/diffs/components/edit_button.vue';
-
-const editPath = 'test-path';
-
-describe('EditButton', () => {
- let wrapper;
-
- const createComponent = (props = {}) => {
- wrapper = shallowMount(EditButton, {
- propsData: { ...props },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('has correct href attribute', () => {
- createComponent({
- editPath,
- canCurrentUserFork: false,
- });
-
- expect(wrapper.find(GlDeprecatedButton).attributes('href')).toBe(editPath);
- });
-
- it('emits a show fork message event if current user can fork', () => {
- createComponent({
- editPath,
- canCurrentUserFork: true,
- });
- wrapper.find(GlDeprecatedButton).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('showForkMessage')).toBeTruthy();
- });
- });
-
- it('doesnt emit a show fork message event if current user cannot fork', () => {
- createComponent({
- editPath,
- canCurrentUserFork: false,
- });
- wrapper.find(GlDeprecatedButton).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('showForkMessage')).toBeFalsy();
- });
- });
-
- it('doesnt emit a show fork message event if current user can modify blob', () => {
- createComponent({
- editPath,
- canCurrentUserFork: true,
- canModifyBlob: true,
- });
- wrapper.find(GlDeprecatedButton).trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.emitted('showForkMessage')).toBeFalsy();
- });
- });
-
- it('disables button if editPath is empty', () => {
- createComponent({
- editPath: '',
- canCurrentUserFork: true,
- canModifyBlob: true,
- });
-
- expect(wrapper.find(GlDeprecatedButton).attributes('disabled')).toBe('true');
- });
-});
diff --git a/spec/frontend/diffs/mock_data/diff_file.js b/spec/frontend/diffs/mock_data/diff_file.js
index c2a4424ee95..babaaa21dab 100644
--- a/spec/frontend/diffs/mock_data/diff_file.js
+++ b/spec/frontend/diffs/mock_data/diff_file.js
@@ -27,7 +27,7 @@ export default {
viewer: {
name: 'text',
error: null,
- collapsed: false,
+ automaticallyCollapsed: false,
},
added_lines: 2,
removed_lines: 0,
diff --git a/spec/frontend/diffs/mock_data/diff_file_unreadable.js b/spec/frontend/diffs/mock_data/diff_file_unreadable.js
index 8c2df45988e..fca81faabf6 100644
--- a/spec/frontend/diffs/mock_data/diff_file_unreadable.js
+++ b/spec/frontend/diffs/mock_data/diff_file_unreadable.js
@@ -26,7 +26,7 @@ export default {
viewer: {
name: 'text',
error: null,
- collapsed: false,
+ automaticallyCollapsed: false,
},
added_lines: 0,
removed_lines: 0,
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index 4f647b0cd41..c3e4ee9c531 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -483,14 +483,14 @@ describe('DiffsStoreActions', () => {
id: 1,
renderIt: false,
viewer: {
- collapsed: false,
+ automaticallyCollapsed: false,
},
},
{
id: 2,
renderIt: false,
viewer: {
- collapsed: false,
+ automaticallyCollapsed: false,
},
},
],
@@ -967,7 +967,7 @@ describe('DiffsStoreActions', () => {
{
file_hash: 'HASH',
viewer: {
- collapsed,
+ automaticallyCollapsed: collapsed,
},
renderIt,
},
@@ -1167,7 +1167,7 @@ describe('DiffsStoreActions', () => {
file_hash: 'testhash',
alternate_viewer: { name: updatedViewerName },
};
- const updatedViewer = { name: updatedViewerName, collapsed: false };
+ const updatedViewer = { name: updatedViewerName, automaticallyCollapsed: false };
const testData = [{ rich_text: 'test' }, { rich_text: 'file2' }];
let renamedFile;
let mock;
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index dac5be2d656..0083f1d8b44 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -51,13 +51,19 @@ describe('Diffs Module Getters', () => {
describe('hasCollapsedFile', () => {
it('returns true when all files are collapsed', () => {
- localState.diffFiles = [{ viewer: { collapsed: true } }, { viewer: { collapsed: true } }];
+ localState.diffFiles = [
+ { viewer: { automaticallyCollapsed: true } },
+ { viewer: { automaticallyCollapsed: true } },
+ ];
expect(getters.hasCollapsedFile(localState)).toEqual(true);
});
it('returns true when at least one file is collapsed', () => {
- localState.diffFiles = [{ viewer: { collapsed: false } }, { viewer: { collapsed: true } }];
+ localState.diffFiles = [
+ { viewer: { automaticallyCollapsed: false } },
+ { viewer: { automaticallyCollapsed: true } },
+ ];
expect(getters.hasCollapsedFile(localState)).toEqual(true);
});
@@ -139,50 +145,74 @@ describe('Diffs Module Getters', () => {
describe('diffHasExpandedDiscussions', () => {
it('returns true when one of the discussions is expanded', () => {
- discussionMock1.expanded = false;
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [discussionMock, discussionMock],
+ discussionsExpanded: true,
+ },
+ ],
+ };
- expect(
- getters.diffHasExpandedDiscussions(localState, {
- getDiffFileDiscussions: () => [discussionMock, discussionMock],
- })(diffFileMock),
- ).toEqual(true);
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(true);
});
it('returns false when there are no discussions', () => {
- expect(
- getters.diffHasExpandedDiscussions(localState, { getDiffFileDiscussions: () => [] })(
- diffFileMock,
- ),
- ).toEqual(false);
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [],
+ discussionsExpanded: true,
+ },
+ ],
+ };
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(false);
});
it('returns false when no discussion is expanded', () => {
- discussionMock.expanded = false;
- discussionMock1.expanded = false;
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [discussionMock, discussionMock],
+ discussionsExpanded: false,
+ },
+ ],
+ };
- expect(
- getters.diffHasExpandedDiscussions(localState, {
- getDiffFileDiscussions: () => [discussionMock, discussionMock1],
- })(diffFileMock),
- ).toEqual(false);
+ expect(getters.diffHasExpandedDiscussions(localState)(diffFile)).toEqual(false);
});
});
describe('diffHasDiscussions', () => {
it('returns true when getDiffFileDiscussions returns discussions', () => {
- expect(
- getters.diffHasDiscussions(localState, {
- getDiffFileDiscussions: () => [discussionMock],
- })(diffFileMock),
- ).toEqual(true);
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [discussionMock, discussionMock],
+ discussionsExpanded: false,
+ },
+ ],
+ };
+
+ expect(getters.diffHasDiscussions(localState)(diffFile)).toEqual(true);
});
it('returns false when getDiffFileDiscussions returns no discussions', () => {
- expect(
- getters.diffHasDiscussions(localState, {
- getDiffFileDiscussions: () => [],
- })(diffFileMock),
- ).toEqual(false);
+ const diffFile = {
+ parallel_diff_lines: [],
+ highlighted_diff_lines: [
+ {
+ discussions: [],
+ discussionsExpanded: false,
+ },
+ ],
+ };
+
+ expect(getters.diffHasDiscussions(localState)(diffFile)).toEqual(false);
});
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index e1d855ae0cf..a84ad63c695 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -130,14 +130,14 @@ describe('DiffsStoreMutations', () => {
it('should change the collapsed prop from diffFiles', () => {
const diffFile = {
viewer: {
- collapsed: true,
+ automaticallyCollapsed: true,
},
};
const state = { expandAllFiles: true, diffFiles: [diffFile] };
mutations[types.EXPAND_ALL_FILES](state);
- expect(state.diffFiles[0].viewer.collapsed).toEqual(false);
+ expect(state.diffFiles[0].viewer.automaticallyCollapsed).toEqual(false);
});
});
@@ -933,12 +933,12 @@ describe('DiffsStoreMutations', () => {
describe('SET_FILE_COLLAPSED', () => {
it('sets collapsed', () => {
const state = {
- diffFiles: [{ file_path: 'test', viewer: { collapsed: false } }],
+ diffFiles: [{ file_path: 'test', viewer: { automaticallyCollapsed: false } }],
};
mutations[types.SET_FILE_COLLAPSED](state, { filePath: 'test', collapsed: true });
- expect(state.diffFiles[0].viewer.collapsed).toBe(true);
+ expect(state.diffFiles[0].viewer.automaticallyCollapsed).toBe(true);
});
});
diff --git a/spec/frontend/emoji/emoji_spec.js b/spec/frontend/emoji/emoji_spec.js
index 53c6d0835bc..2f174c45ad7 100644
--- a/spec/frontend/emoji/emoji_spec.js
+++ b/spec/frontend/emoji/emoji_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { trimText } from 'helpers/text_helper';
import axios from '~/lib/utils/axios_utils';
-import { initEmojiMap, glEmojiTag, EMOJI_VERSION } from '~/emoji';
+import { initEmojiMap, glEmojiTag, searchEmoji, EMOJI_VERSION } from '~/emoji';
import isEmojiUnicodeSupported, {
isFlagEmoji,
isRainbowFlagEmoji,
@@ -31,25 +31,35 @@ const emptySupportMap = {
};
const emojiFixtureMap = {
+ atom: {
+ name: 'atom',
+ moji: '⚛',
+ description: 'atom symbol',
+ unicodeVersion: '4.1',
+ },
bomb: {
name: 'bomb',
moji: '💣',
unicodeVersion: '6.0',
+ description: 'bomb',
},
construction_worker_tone5: {
name: 'construction_worker_tone5',
moji: '👷🏿',
unicodeVersion: '8.0',
+ description: 'construction worker tone 5',
},
five: {
name: 'five',
moji: '5️⃣',
unicodeVersion: '3.0',
+ description: 'keycap digit five',
},
grey_question: {
name: 'grey_question',
moji: '❔',
unicodeVersion: '6.0',
+ description: 'white question mark ornament',
},
};
@@ -57,8 +67,15 @@ describe('gl_emoji', () => {
let mock;
beforeEach(() => {
+ const emojiData = Object.fromEntries(
+ Object.values(emojiFixtureMap).map(m => {
+ const { name: n, moji: e, unicodeVersion: u, category: c, description: d } = m;
+ return [n, { c, e, d, u }];
+ }),
+ );
+
mock = new MockAdapter(axios);
- mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200);
+ mock.onGet(`/-/emojis/${EMOJI_VERSION}/emojis.json`).reply(200, JSON.stringify(emojiData));
return initEmojiMap().catch(() => {});
});
@@ -378,4 +395,24 @@ describe('gl_emoji', () => {
expect(isSupported).toBeFalsy();
});
});
+
+ describe('searchEmoji', () => {
+ const { atom, grey_question } = emojiFixtureMap;
+ const contains = (e, term) =>
+ expect(searchEmoji(term).map(({ name }) => name)).toContain(e.name);
+
+ it('should match by full name', () => contains(grey_question, 'grey_question'));
+ it('should match by full alias', () => contains(atom, 'atom_symbol'));
+ it('should match by full description', () => contains(grey_question, 'ornament'));
+
+ it('should match by partial name', () => contains(grey_question, 'question'));
+ it('should match by partial alias', () => contains(atom, '_symbol'));
+ it('should match by partial description', () => contains(grey_question, 'ment'));
+
+ it('should fuzzy match by name', () => contains(grey_question, 'greion'));
+ it('should fuzzy match by alias', () => contains(atom, 'atobol'));
+ it('should fuzzy match by description', () => contains(grey_question, 'ornt'));
+
+ it('should match by character', () => contains(grey_question, '❔'));
+ });
});
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index 35ca323f5a9..c958fb7ce03 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -1,4 +1,4 @@
-/* eslint-disable import/no-commonjs */
+/* eslint-disable import/no-commonjs, max-classes-per-file */
const path = require('path');
const { ErrorWithStack } = require('jest-util');
@@ -58,6 +58,14 @@ class CustomEnvironment extends JSDOMEnvironment {
measure: () => null,
getEntriesByName: () => [],
});
+
+ this.global.PerformanceObserver = class {
+ /* eslint-disable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
+ constructor(callback) {}
+ disconnect() {}
+ observe(element, initObject) {}
+ /* eslint-enable no-useless-constructor, no-unused-vars, no-empty-function, class-methods-use-this */
+ };
}
async teardown() {
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index fe32bf918dd..22b066fae41 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -40,6 +40,9 @@ describe('Environment', () => {
return axios.waitForAll();
};
+ const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available > a');
+ const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped > a');
+
beforeEach(() => {
mock = new MockAdapter(axios);
});
@@ -108,9 +111,16 @@ describe('Environment', () => {
it('should make an API request when using tabs', () => {
jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
- wrapper.find('.js-environments-tab-stopped').trigger('click');
+ findEnvironmentsTabStopped().trigger('click');
expect(wrapper.vm.updateContent).toHaveBeenCalledWith({ scope: 'stopped', page: '1' });
});
+
+ it('should not make the same API request when clicking on the current scope tab', () => {
+ // component starts at available
+ jest.spyOn(wrapper.vm, 'updateContent').mockImplementation(() => {});
+ findEnvironmentsTabAvailable().trigger('click');
+ expect(wrapper.vm.updateContent).toHaveBeenCalledTimes(0);
+ });
});
});
});
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
index f33c8de0094..14c710dd7ba 100644
--- a/spec/frontend/environments/folder/environments_folder_view_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -46,9 +46,10 @@ describe('Environments Folder View', () => {
wrapper = mount(EnvironmentsFolderViewComponent, { propsData: mockData });
};
- const findEnvironmentsTabAvailable = () => wrapper.find('.js-environments-tab-available');
+ const findEnvironmentsTabAvailable = () =>
+ wrapper.find('[data-testid="environments-tab-available"]');
- const findEnvironmentsTabStopped = () => wrapper.find('.js-environments-tab-stopped');
+ const findEnvironmentsTabStopped = () => wrapper.find('[data-testid="environments-tab-stopped"]');
beforeEach(() => {
mock = new MockAdapter(axios);
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index 21edcb7235a..f4a765a3d73 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -1,7 +1,6 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { GlFormInput } from '@gitlab/ui';
-import LoadingButton from '~/vue_shared/components/loading_button.vue';
+import { GlFormInput, GlButton } from '@gitlab/ui';
import ErrorTrackingForm from '~/error_tracking_settings/components/error_tracking_form.vue';
import createStore from '~/error_tracking_settings/store';
import { defaultProps } from '../mock';
@@ -43,7 +42,7 @@ describe('error tracking settings form', () => {
.attributes('id'),
).toBe('error-tracking-token');
- expect(wrapper.findAll(LoadingButton).exists()).toBe(true);
+ expect(wrapper.findAll(GlButton).exists()).toBe(true);
});
it('is rendered with labels and placeholders', () => {
@@ -72,9 +71,10 @@ describe('error tracking settings form', () => {
});
it('shows loading spinner', () => {
- const { label, loading } = wrapper.find(LoadingButton).props();
- expect(loading).toBe(true);
- expect(label).toBe('Connecting');
+ const buttonEl = wrapper.find(GlButton);
+
+ expect(buttonEl.props('loading')).toBe(true);
+ expect(buttonEl.text()).toBe('Connecting');
});
});
diff --git a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
new file mode 100644
index 00000000000..47f786827f1
--- /dev/null
+++ b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
@@ -0,0 +1,159 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal, GlSprintf } from '@gitlab/ui';
+import Component from '~/feature_flags/components/configure_feature_flags_modal.vue';
+import Callout from '~/vue_shared/components/callout.vue';
+
+describe('Configure Feature Flags Modal', () => {
+ const mockEvent = { preventDefault: jest.fn() };
+ const provide = {
+ projectName: 'fakeProjectName',
+ featureFlagsHelpPagePath: '/help/path',
+ };
+
+ const propsData = {
+ helpClientLibrariesPath: '/help/path/#flags',
+ helpClientExamplePath: '/feature-flags#clientexample',
+ apiUrl: '/api/url',
+ instanceId: 'instance-id-token',
+ isRotating: false,
+ hasRotateError: false,
+ canUserRotateToken: true,
+ };
+
+ let wrapper;
+ const factory = (props = {}, { mountFn = shallowMount, ...options } = {}) => {
+ wrapper = mountFn(Component, {
+ provide,
+ stubs: { GlSprintf },
+ propsData: {
+ ...propsData,
+ ...props,
+ },
+ ...options,
+ });
+ };
+
+ const findGlModal = () => wrapper.find(GlModal);
+ const findPrimaryAction = () => findGlModal().props('actionPrimary');
+ const findProjectNameInput = () => wrapper.find('#project_name_verification');
+ const findDangerCallout = () =>
+ wrapper.findAll(Callout).filter(c => c.props('category') === 'danger');
+
+ describe('idle', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory);
+
+ it('should have Primary and Cancel actions', () => {
+ expect(findGlModal().props('actionCancel').text).toBe('Close');
+ expect(findPrimaryAction().text).toBe('Regenerate instance ID');
+ });
+
+ it('should disable the primary action by default', async () => {
+ const [{ disabled }] = findPrimaryAction().attributes;
+ expect(disabled).toBe(true);
+ });
+
+ it('should emit a `token` event when clicking on the Primary action', async () => {
+ findGlModal().vm.$emit('primary', mockEvent);
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('token')).toEqual([[]]);
+ expect(mockEvent.preventDefault).toHaveBeenCalled();
+ });
+
+ it('should clear the project name input after generating the token', async () => {
+ findProjectNameInput().vm.$emit('input', provide.projectName);
+ findGlModal().vm.$emit('primary', mockEvent);
+ await wrapper.vm.$nextTick();
+ expect(findProjectNameInput().attributes('value')).toBe('');
+ });
+
+ it('should provide an input for filling the project name', () => {
+ expect(findProjectNameInput().exists()).toBe(true);
+ expect(findProjectNameInput().attributes('value')).toBe('');
+ });
+
+ it('should display help text', () => {
+ const help = wrapper.find('p');
+ expect(help.text()).toMatch(/More Information/);
+ });
+
+ it('should have links to the documentation', () => {
+ expect(wrapper.find('[data-testid="help-link"]').attributes('href')).toBe(
+ provide.featureFlagsHelpPagePath,
+ );
+ expect(wrapper.find('[data-testid="help-client-link"]').attributes('href')).toBe(
+ propsData.helpClientLibrariesPath,
+ );
+ });
+
+ it('should display one and only one danger callout', () => {
+ const dangerCallout = findDangerCallout();
+ expect(dangerCallout.length).toBe(1);
+ expect(dangerCallout.at(0).props('message')).toMatch(/Regenerating the instance ID/);
+ });
+
+ it('should display a message asking the user to fill in the project name', () => {
+ expect(wrapper.find('[data-testid="prevent-accident-text"]').text()).toMatch(
+ provide.projectName,
+ );
+ });
+
+ it('should display the API URL in an input box', () => {
+ const input = wrapper.find('#api_url');
+ expect(input.element.value).toBe('/api/url');
+ });
+
+ it('should display the instance ID in an input box', () => {
+ const input = wrapper.find('#instance_id');
+ expect(input.element.value).toBe('instance-id-token');
+ });
+ });
+
+ describe('verified', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory);
+
+ it('should enable the primary action', async () => {
+ findProjectNameInput().vm.$emit('input', provide.projectName);
+ await wrapper.vm.$nextTick();
+ const [{ disabled }] = findPrimaryAction().attributes;
+ expect(disabled).toBe(false);
+ });
+ });
+
+ describe('cannot rotate token', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory.bind(null, { canUserRotateToken: false }));
+
+ it('should not display the primary action', async () => {
+ expect(findPrimaryAction()).toBe(null);
+ });
+
+ it('should not display the regenerating instance ID callout', async () => {
+ expect(findDangerCallout().exists()).toBe(false);
+ });
+
+ it('should not display the project name input', async () => {
+ expect(findProjectNameInput().exists()).toBe(false);
+ });
+ });
+
+ describe('has rotate error', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory.bind(null, { hasRotateError: true }));
+
+ it('should display an error', async () => {
+ expect(wrapper.find('.text-danger')).toExist();
+ expect(wrapper.find('[name="warning"]')).toExist();
+ });
+ });
+
+ describe('is rotating', () => {
+ afterEach(() => wrapper.destroy());
+ beforeEach(factory.bind(null, { isRotating: true }));
+
+ it('should disable the project name input', async () => {
+ expect(findProjectNameInput().attributes('disabled')).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/edit_feature_flag_spec.js b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
new file mode 100644
index 00000000000..f2e587bb8d9
--- /dev/null
+++ b/spec/frontend/feature_flags/components/edit_feature_flag_spec.js
@@ -0,0 +1,197 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { GlToggle, GlAlert } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import { mockTracking } from 'helpers/tracking_helper';
+import { LEGACY_FLAG, NEW_VERSION_FLAG, NEW_FLAG_ALERT } from '~/feature_flags/constants';
+import Form from '~/feature_flags/components/form.vue';
+import editModule from '~/feature_flags/store/modules/edit';
+import EditFeatureFlag from '~/feature_flags/components/edit_feature_flag.vue';
+import axios from '~/lib/utils/axios_utils';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+const userCalloutId = 'feature_flags_new_version';
+const userCalloutsPath = `${TEST_HOST}/user_callouts`;
+
+describe('Edit feature flag form', () => {
+ let wrapper;
+ let mock;
+
+ const store = new Vuex.Store({
+ modules: {
+ edit: editModule,
+ },
+ });
+
+ const factory = (opts = {}) => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ wrapper = shallowMount(EditFeatureFlag, {
+ localVue,
+ propsData: {
+ endpoint: `${TEST_HOST}/feature_flags.json`,
+ path: '/feature_flags',
+ environmentsEndpoint: 'environments.json',
+ projectId: '8',
+ featureFlagIssuesEndpoint: `${TEST_HOST}/feature_flags/5/issues`,
+ showUserCallout: true,
+ userCalloutId,
+ userCalloutsPath,
+ },
+ store,
+ provide: {
+ glFeatures: {
+ featureFlagsNewVersion: true,
+ },
+ },
+ ...opts,
+ });
+ };
+
+ beforeEach(done => {
+ mock = new MockAdapter(axios);
+ mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
+ id: 21,
+ iid: 5,
+ active: true,
+ created_at: '2019-01-17T17:27:39.778Z',
+ updated_at: '2019-01-17T17:27:39.778Z',
+ name: 'feature_flag',
+ description: '',
+ version: LEGACY_FLAG,
+ edit_path: '/h5bp/html5-boilerplate/-/feature_flags/21/edit',
+ destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
+ scopes: [
+ {
+ id: 21,
+ active: false,
+ environment_scope: '*',
+ created_at: '2019-01-17T17:27:39.778Z',
+ updated_at: '2019-01-17T17:27:39.778Z',
+ },
+ ],
+ });
+ factory();
+ setImmediate(() => done());
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ it('should display the iid', () => {
+ expect(wrapper.find('h3').text()).toContain('^5');
+ });
+
+ it('should render the toggle', () => {
+ expect(wrapper.find(GlToggle).exists()).toBe(true);
+ });
+
+ it('should set the value of the toggle to whether or not the flag is active', () => {
+ expect(wrapper.find(GlToggle).props('value')).toBe(true);
+ });
+
+ it('should not alert users that feature flags are changing soon', () => {
+ expect(findAlert().text()).toContain('GitLab is moving to a new way of managing feature flags');
+ });
+
+ describe('with error', () => {
+ it('should render the error', () => {
+ store.dispatch('edit/receiveUpdateFeatureFlagError', { message: ['The name is required'] });
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.alert-danger').exists()).toEqual(true);
+ expect(wrapper.find('.alert-danger').text()).toContain('The name is required');
+ });
+ });
+ });
+
+ describe('without error', () => {
+ it('renders form title', () => {
+ expect(wrapper.text()).toContain('^5 feature_flag');
+ });
+
+ it('should render feature flag form', () => {
+ expect(wrapper.find(Form).exists()).toEqual(true);
+ });
+
+ it('should set the version of the form from the feature flag', () => {
+ expect(wrapper.find(Form).props('version')).toBe(LEGACY_FLAG);
+
+ mock.resetHandlers();
+
+ mock.onGet(`${TEST_HOST}/feature_flags.json`).replyOnce(200, {
+ id: 21,
+ iid: 5,
+ active: true,
+ created_at: '2019-01-17T17:27:39.778Z',
+ updated_at: '2019-01-17T17:27:39.778Z',
+ name: 'feature_flag',
+ description: '',
+ version: NEW_VERSION_FLAG,
+ edit_path: '/h5bp/html5-boilerplate/-/feature_flags/21/edit',
+ destroy_path: '/h5bp/html5-boilerplate/-/feature_flags/21',
+ strategies: [],
+ });
+
+ factory();
+
+ return axios.waitForAll().then(() => {
+ expect(wrapper.find(Form).props('version')).toBe(NEW_VERSION_FLAG);
+ });
+ });
+
+ it('renders the related issues widget', () => {
+ const expected = `${TEST_HOST}/feature_flags/5/issues`;
+
+ expect(wrapper.find(Form).props('featureFlagIssuesEndpoint')).toBe(expected);
+ });
+
+ it('should track when the toggle is clicked', () => {
+ const toggle = wrapper.find(GlToggle);
+ const spy = mockTracking('_category_', toggle.element, jest.spyOn);
+
+ toggle.trigger('click');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'feature_flag_toggle',
+ });
+ });
+ });
+
+ describe('without new version flags', () => {
+ beforeEach(() => factory({ provide: { glFeatures: { featureFlagsNewVersion: false } } }));
+
+ it('should alert users that feature flags are changing soon', () => {
+ expect(findAlert().text()).toBe(NEW_FLAG_ALERT);
+ });
+ });
+
+ describe('dismissing new version alert', () => {
+ beforeEach(() => {
+ factory({ provide: { glFeatures: { featureFlagsNewVersion: false } } });
+ mock.onPost(userCalloutsPath, { feature_name: userCalloutId }).reply(200);
+ findAlert().vm.$emit('dismiss');
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should hide the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should send the dismissal event', () => {
+ expect(mock.history.post.length).toBe(1);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/environments_dropdown_spec.js b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
new file mode 100644
index 00000000000..2aa75ef6652
--- /dev/null
+++ b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
@@ -0,0 +1,145 @@
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon, GlDeprecatedButton, GlSearchBoxByType } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import waitForPromises from 'helpers/wait_for_promises';
+import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
+import axios from '~/lib/utils/axios_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
+
+describe('Feature flags > Environments dropdown', () => {
+ let wrapper;
+ let mock;
+ const results = ['production', 'staging'];
+ const factory = props => {
+ wrapper = shallowMount(EnvironmentsDropdown, {
+ propsData: {
+ endpoint: `${TEST_HOST}/environments.json`,
+ ...props,
+ },
+ });
+ };
+
+ const findEnvironmentSearchInput = () => wrapper.find(GlSearchBoxByType);
+ const findDropdownMenu = () => wrapper.find('.dropdown-menu');
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ describe('without value', () => {
+ it('renders the placeholder', () => {
+ factory();
+ expect(findEnvironmentSearchInput().vm.$attrs.placeholder).toBe('Search an environment spec');
+ });
+ });
+
+ describe('with value', () => {
+ it('sets filter to equal the value', () => {
+ factory({ value: 'production' });
+ expect(findEnvironmentSearchInput().props('value')).toBe('production');
+ });
+ });
+
+ describe('on focus', () => {
+ it('sets results with the received data', async () => {
+ mock.onGet(`${TEST_HOST}/environments.json`).replyOnce(httpStatusCodes.OK, results);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('focus');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.dropdown-content > ul').exists()).toBe(true);
+ expect(wrapper.findAll('.dropdown-content > ul > li').exists()).toBe(true);
+ });
+ });
+
+ describe('on keyup', () => {
+ it('sets results with the received data', async () => {
+ mock.onGet(`${TEST_HOST}/environments.json`).replyOnce(httpStatusCodes.OK, results);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('keyup');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ expect(wrapper.find('.dropdown-content > ul').exists()).toBe(true);
+ expect(wrapper.findAll('.dropdown-content > ul > li').exists()).toBe(true);
+ });
+ });
+
+ describe('on input change', () => {
+ describe('on success', () => {
+ beforeEach(async () => {
+ mock.onGet(`${TEST_HOST}/environments.json`).replyOnce(httpStatusCodes.OK, results);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('focus');
+ findEnvironmentSearchInput().vm.$emit('input', 'production');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('sets filter value', () => {
+ expect(findEnvironmentSearchInput().props('value')).toBe('production');
+ });
+
+ describe('with received data', () => {
+ it('sets is loading to false', () => {
+ expect(wrapper.vm.isLoading).toBe(false);
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ });
+
+ it('shows the suggestions', () => {
+ expect(findDropdownMenu().exists()).toBe(true);
+ });
+
+ it('emits event when a suggestion is clicked', async () => {
+ const button = wrapper
+ .findAll(GlDeprecatedButton)
+ .filter(b => b.text() === 'production')
+ .at(0);
+ button.vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('selectEnvironment')).toEqual([['production']]);
+ });
+ });
+
+ describe('on click clear button', () => {
+ beforeEach(async () => {
+ wrapper.find(GlDeprecatedButton).vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ });
+
+ it('resets filter value', () => {
+ expect(findEnvironmentSearchInput().props('value')).toBe('');
+ });
+
+ it('closes list of suggestions', () => {
+ expect(wrapper.vm.showSuggestions).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('on click create button', () => {
+ beforeEach(async () => {
+ mock.onGet(`${TEST_HOST}/environments.json`).replyOnce(httpStatusCodes.OK, []);
+ factory();
+ findEnvironmentSearchInput().vm.$emit('focus');
+ findEnvironmentSearchInput().vm.$emit('input', 'production');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('emits create event', async () => {
+ wrapper
+ .findAll(GlDeprecatedButton)
+ .at(0)
+ .vm.$emit('click');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.emitted('createClicked')).toEqual([['production']]);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/feature_flags_spec.js b/spec/frontend/feature_flags/components/feature_flags_spec.js
new file mode 100644
index 00000000000..5ff39937113
--- /dev/null
+++ b/spec/frontend/feature_flags/components/feature_flags_spec.js
@@ -0,0 +1,343 @@
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import Api from '~/api';
+import { createStore } from '~/feature_flags/store';
+import FeatureFlagsTab from '~/feature_flags/components/feature_flags_tab.vue';
+import FeatureFlagsComponent from '~/feature_flags/components/feature_flags.vue';
+import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue';
+import UserListsTable from '~/feature_flags/components/user_lists_table.vue';
+import ConfigureFeatureFlagsModal from '~/feature_flags/components/configure_feature_flags_modal.vue';
+import { FEATURE_FLAG_SCOPE, USER_LIST_SCOPE } from '~/feature_flags/constants';
+import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+import axios from '~/lib/utils/axios_utils';
+import { getRequestData, userList } from '../mock_data';
+
+describe('Feature flags', () => {
+ const mockData = {
+ endpoint: `${TEST_HOST}/endpoint.json`,
+ csrfToken: 'testToken',
+ featureFlagsClientLibrariesHelpPagePath: '/help/feature-flags#unleash-clients',
+ featureFlagsClientExampleHelpPagePath: '/help/feature-flags#client-example',
+ unleashApiUrl: `${TEST_HOST}/api/unleash`,
+ unleashApiInstanceId: 'oP6sCNRqtRHmpy1gw2-F',
+ canUserConfigure: true,
+ canUserRotateToken: true,
+ newFeatureFlagPath: 'feature-flags/new',
+ newUserListPath: '/user-list/new',
+ projectId: '8',
+ };
+
+ let wrapper;
+ let mock;
+ let store;
+
+ const factory = (propsData = mockData, fn = shallowMount) => {
+ store = createStore();
+ wrapper = fn(FeatureFlagsComponent, {
+ store,
+ propsData,
+ provide: {
+ projectName: 'fakeProjectName',
+ errorStateSvgPath: '/assets/illustrations/feature_flag.svg',
+ featureFlagsHelpPagePath: '/help/feature-flags',
+ },
+ stubs: {
+ FeatureFlagsTab,
+ },
+ });
+ };
+
+ const configureButton = () => wrapper.find('[data-testid="ff-configure-button"]');
+ const newButton = () => wrapper.find('[data-testid="ff-new-button"]');
+ const newUserListButton = () => wrapper.find('[data-testid="ff-new-list-button"]');
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ jest.spyOn(Api, 'fetchFeatureFlagUserLists').mockResolvedValue({
+ data: [userList],
+ headers: {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '8',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '40',
+ 'X-Total-Pages': '5',
+ },
+ });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('without permissions', () => {
+ const propsData = {
+ endpoint: `${TEST_HOST}/endpoint.json`,
+ csrfToken: 'testToken',
+ errorStateSvgPath: '/assets/illustrations/feature_flag.svg',
+ featureFlagsHelpPagePath: '/help/feature-flags',
+ canUserConfigure: false,
+ canUserRotateToken: false,
+ featureFlagsClientLibrariesHelpPagePath: '/help/feature-flags#unleash-clients',
+ featureFlagsClientExampleHelpPagePath: '/help/feature-flags#client-example',
+ unleashApiUrl: `${TEST_HOST}/api/unleash`,
+ unleashApiInstanceId: 'oP6sCNRqtRHmpy1gw2-F',
+ projectId: '8',
+ };
+
+ beforeEach(done => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .reply(200, getRequestData, {});
+
+ factory(propsData);
+
+ setImmediate(() => {
+ done();
+ });
+ });
+
+ it('does not render configure button', () => {
+ expect(configureButton().exists()).toBe(false);
+ });
+
+ it('does not render new feature flag button', () => {
+ expect(newButton().exists()).toBe(false);
+ });
+
+ it('does not render new user list button', () => {
+ expect(newUserListButton().exists()).toBe(false);
+ });
+ });
+
+ describe('loading state', () => {
+ it('renders a loading icon', () => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .replyOnce(200, getRequestData, {});
+
+ factory();
+
+ const loadingElement = wrapper.find(GlLoadingIcon);
+
+ expect(loadingElement.exists()).toBe(true);
+ expect(loadingElement.props('label')).toEqual('Loading feature flags');
+ });
+ });
+
+ describe('successful request', () => {
+ describe('without feature flags', () => {
+ let emptyState;
+
+ beforeEach(async () => {
+ mock.onGet(mockData.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } }).reply(
+ 200,
+ {
+ feature_flags: [],
+ count: {
+ all: 0,
+ enabled: 0,
+ disabled: 0,
+ },
+ },
+ {},
+ );
+
+ factory();
+ await wrapper.vm.$nextTick();
+
+ emptyState = wrapper.find(GlEmptyState);
+ });
+
+ it('should render the empty state', async () => {
+ await axios.waitForAll();
+ emptyState = wrapper.find(GlEmptyState);
+ expect(emptyState.exists()).toBe(true);
+ });
+
+ it('renders configure button', () => {
+ expect(configureButton().exists()).toBe(true);
+ });
+
+ it('renders new feature flag button', () => {
+ expect(newButton().exists()).toBe(true);
+ });
+
+ it('renders new user list button', () => {
+ expect(newUserListButton().exists()).toBe(true);
+ expect(newUserListButton().attributes('href')).toBe('/user-list/new');
+ });
+
+ describe('in feature flags tab', () => {
+ it('renders generic title', () => {
+ expect(emptyState.props('title')).toEqual('Get started with feature flags');
+ });
+ });
+ });
+
+ describe('with paginated feature flags', () => {
+ beforeEach(done => {
+ mock
+ .onGet(mockData.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .replyOnce(200, getRequestData, {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ });
+
+ factory();
+ jest.spyOn(store, 'dispatch');
+ setImmediate(() => {
+ done();
+ });
+ });
+
+ it('should render a table with feature flags', () => {
+ const table = wrapper.find(FeatureFlagsTable);
+ expect(table.exists()).toBe(true);
+ expect(table.props(FEATURE_FLAG_SCOPE)).toEqual(
+ expect.arrayContaining([
+ expect.objectContaining({
+ name: getRequestData.feature_flags[0].name,
+ description: getRequestData.feature_flags[0].description,
+ }),
+ ]),
+ );
+ });
+
+ it('should toggle a flag when receiving the toggle-flag event', () => {
+ const table = wrapper.find(FeatureFlagsTable);
+
+ const [flag] = table.props(FEATURE_FLAG_SCOPE);
+ table.vm.$emit('toggle-flag', flag);
+
+ expect(store.dispatch).toHaveBeenCalledWith('index/toggleFeatureFlag', flag);
+ });
+
+ it('renders configure button', () => {
+ expect(configureButton().exists()).toBe(true);
+ });
+
+ it('renders new feature flag button', () => {
+ expect(newButton().exists()).toBe(true);
+ });
+
+ it('renders new user list button', () => {
+ expect(newUserListButton().exists()).toBe(true);
+ expect(newUserListButton().attributes('href')).toBe('/user-list/new');
+ });
+
+ describe('pagination', () => {
+ it('should render pagination', () => {
+ expect(wrapper.find(TablePagination).exists()).toBe(true);
+ });
+
+ it('should make an API request when page is clicked', () => {
+ jest.spyOn(wrapper.vm, 'updateFeatureFlagOptions');
+ wrapper.find(TablePagination).vm.change(4);
+
+ expect(wrapper.vm.updateFeatureFlagOptions).toHaveBeenCalledWith({
+ scope: FEATURE_FLAG_SCOPE,
+ page: '4',
+ });
+ });
+
+ it('should make an API request when using tabs', () => {
+ jest.spyOn(wrapper.vm, 'updateFeatureFlagOptions');
+ wrapper.find('[data-testid="user-lists-tab"]').vm.$emit('changeTab');
+
+ expect(wrapper.vm.updateFeatureFlagOptions).toHaveBeenCalledWith({
+ scope: USER_LIST_SCOPE,
+ page: '1',
+ });
+ });
+ });
+ });
+
+ describe('in user lists tab', () => {
+ beforeEach(done => {
+ factory();
+
+ setImmediate(() => {
+ done();
+ });
+ });
+ beforeEach(() => {
+ wrapper.find('[data-testid="user-lists-tab"]').vm.$emit('changeTab');
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should display the user list table', () => {
+ expect(wrapper.find(UserListsTable).exists()).toBe(true);
+ });
+
+ it('should set the user lists to display', () => {
+ expect(wrapper.find(UserListsTable).props('userLists')).toEqual([userList]);
+ });
+ });
+ });
+
+ describe('unsuccessful request', () => {
+ beforeEach(done => {
+ mock
+ .onGet(mockData.endpoint, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .replyOnce(500, {});
+ Api.fetchFeatureFlagUserLists.mockRejectedValueOnce();
+
+ factory();
+
+ setImmediate(() => {
+ done();
+ });
+ });
+
+ it('should render error state', () => {
+ const emptyState = wrapper.find(GlEmptyState);
+ expect(emptyState.props('title')).toEqual('There was an error fetching the feature flags.');
+ expect(emptyState.props('description')).toEqual(
+ 'Try again in a few moments or contact your support team.',
+ );
+ });
+
+ it('renders configure button', () => {
+ expect(configureButton().exists()).toBe(true);
+ });
+
+ it('renders new feature flag button', () => {
+ expect(newButton().exists()).toBe(true);
+ });
+
+ it('renders new user list button', () => {
+ expect(newUserListButton().exists()).toBe(true);
+ expect(newUserListButton().attributes('href')).toBe('/user-list/new');
+ });
+ });
+
+ describe('rotate instance id', () => {
+ beforeEach(done => {
+ mock
+ .onGet(`${TEST_HOST}/endpoint.json`, { params: { scope: FEATURE_FLAG_SCOPE, page: '1' } })
+ .reply(200, getRequestData, {});
+ factory();
+
+ setImmediate(() => {
+ done();
+ });
+ });
+
+ it('should fire the rotate action when a `token` event is received', () => {
+ const actionSpy = jest.spyOn(wrapper.vm, 'rotateInstanceId');
+ const modal = wrapper.find(ConfigureFeatureFlagsModal);
+ modal.vm.$emit('token');
+
+ expect(actionSpy).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/feature_flags_tab_spec.js b/spec/frontend/feature_flags/components/feature_flags_tab_spec.js
new file mode 100644
index 00000000000..bc90c5ceb2d
--- /dev/null
+++ b/spec/frontend/feature_flags/components/feature_flags_tab_spec.js
@@ -0,0 +1,168 @@
+import { mount } from '@vue/test-utils';
+import { GlAlert, GlBadge, GlEmptyState, GlLink, GlLoadingIcon, GlTabs } from '@gitlab/ui';
+import FeatureFlagsTab from '~/feature_flags/components/feature_flags_tab.vue';
+
+const DEFAULT_PROPS = {
+ title: 'test',
+ count: 5,
+ alerts: ['an alert', 'another alert'],
+ isLoading: false,
+ loadingLabel: 'test loading',
+ errorState: false,
+ errorTitle: 'test title',
+ emptyState: true,
+ emptyTitle: 'test empty',
+};
+
+const DEFAULT_PROVIDE = {
+ errorStateSvgPath: '/error.svg',
+ featureFlagsHelpPagePath: '/help/page/path',
+};
+
+describe('feature_flags/components/feature_flags_tab.vue', () => {
+ let wrapper;
+
+ const factory = (props = {}) =>
+ mount(
+ {
+ components: {
+ GlTabs,
+ FeatureFlagsTab,
+ },
+ render(h) {
+ return h(GlTabs, [
+ h(FeatureFlagsTab, { props: this.$attrs, on: this.$listeners }, this.$slots.default),
+ ]);
+ },
+ },
+ {
+ propsData: {
+ ...DEFAULT_PROPS,
+ ...props,
+ },
+ provide: DEFAULT_PROVIDE,
+ slots: {
+ default: '<p data-testid="test-slot">testing</p>',
+ },
+ },
+ );
+
+ afterEach(() => {
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ }
+
+ wrapper = null;
+ });
+
+ describe('alerts', () => {
+ let alerts;
+
+ beforeEach(() => {
+ wrapper = factory();
+ alerts = wrapper.findAll(GlAlert);
+ });
+
+ it('should show any alerts', () => {
+ expect(alerts).toHaveLength(DEFAULT_PROPS.alerts.length);
+ alerts.wrappers.forEach((alert, i) => expect(alert.text()).toBe(DEFAULT_PROPS.alerts[i]));
+ });
+
+ it('should emit a dismiss event for a dismissed alert', () => {
+ alerts.at(0).vm.$emit('dismiss');
+
+ expect(wrapper.find(FeatureFlagsTab).emitted('dismissAlert')).toEqual([[0]]);
+ });
+ });
+
+ describe('loading', () => {
+ beforeEach(() => {
+ wrapper = factory({ isLoading: true });
+ });
+
+ it('should show a loading icon and nothing else', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findAll(GlEmptyState)).toHaveLength(0);
+ });
+ });
+
+ describe('error', () => {
+ let emptyState;
+
+ beforeEach(() => {
+ wrapper = factory({ errorState: true });
+ emptyState = wrapper.find(GlEmptyState);
+ });
+
+ it('should show an error state if there has been an error', () => {
+ expect(emptyState.text()).toContain(DEFAULT_PROPS.errorTitle);
+ expect(emptyState.text()).toContain(
+ 'Try again in a few moments or contact your support team.',
+ );
+ expect(emptyState.props('svgPath')).toBe(DEFAULT_PROVIDE.errorStateSvgPath);
+ });
+ });
+
+ describe('empty', () => {
+ let emptyState;
+ let emptyStateLink;
+
+ beforeEach(() => {
+ wrapper = factory({ emptyState: true });
+ emptyState = wrapper.find(GlEmptyState);
+ emptyStateLink = emptyState.find(GlLink);
+ });
+
+ it('should show an empty state if it is empty', () => {
+ expect(emptyState.text()).toContain(DEFAULT_PROPS.emptyTitle);
+ expect(emptyState.text()).toContain(
+ 'Feature flags allow you to configure your code into different flavors by dynamically toggling certain functionality.',
+ );
+ expect(emptyState.props('svgPath')).toBe(DEFAULT_PROVIDE.errorStateSvgPath);
+ expect(emptyStateLink.attributes('href')).toBe(DEFAULT_PROVIDE.featureFlagsHelpPagePath);
+ expect(emptyStateLink.text()).toBe('More information');
+ });
+ });
+
+ describe('slot', () => {
+ let slot;
+
+ beforeEach(async () => {
+ wrapper = factory();
+ await wrapper.vm.$nextTick();
+
+ slot = wrapper.find('[data-testid="test-slot"]');
+ });
+
+ it('should display the passed slot', () => {
+ expect(slot.exists()).toBe(true);
+ expect(slot.text()).toBe('testing');
+ });
+ });
+
+ describe('count', () => {
+ it('should display a count if there is one', async () => {
+ wrapper = factory();
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlBadge).text()).toBe(DEFAULT_PROPS.count.toString());
+ });
+ it('should display 0 if there is no count', async () => {
+ wrapper = factory({ count: undefined });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlBadge).text()).toBe('0');
+ });
+ });
+
+ describe('title', () => {
+ it('should show the title', async () => {
+ wrapper = factory();
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('[data-testid="feature-flags-tab-title"]').text()).toBe(
+ DEFAULT_PROPS.title,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/feature_flags_table_spec.js b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
new file mode 100644
index 00000000000..c59ecbf3b06
--- /dev/null
+++ b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
@@ -0,0 +1,262 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlToggle, GlBadge } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ NEW_VERSION_FLAG,
+ LEGACY_FLAG,
+ DEFAULT_PERCENT_ROLLOUT,
+} from '~/feature_flags/constants';
+import FeatureFlagsTable from '~/feature_flags/components/feature_flags_table.vue';
+
+const getDefaultProps = () => ({
+ featureFlags: [
+ {
+ id: 1,
+ iid: 1,
+ active: true,
+ name: 'flag name',
+ description: 'flag description',
+ destroy_path: 'destroy/path',
+ edit_path: 'edit/path',
+ version: LEGACY_FLAG,
+ scopes: [
+ {
+ id: 1,
+ active: true,
+ environmentScope: 'scope',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ shouldBeDestroyed: false,
+ },
+ ],
+ },
+ ],
+ csrfToken: 'fakeToken',
+});
+
+describe('Feature flag table', () => {
+ let wrapper;
+ let props;
+
+ const createWrapper = (propsData, opts = {}) => {
+ wrapper = shallowMount(FeatureFlagsTable, {
+ propsData,
+ ...opts,
+ });
+ };
+
+ beforeEach(() => {
+ props = getDefaultProps();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('with an active scope and a standard rollout strategy', () => {
+ beforeEach(() => {
+ createWrapper(props);
+ });
+
+ it('should render a table', () => {
+ expect(wrapper.classes('table-holder')).toBe(true);
+ });
+
+ it('should render rows', () => {
+ expect(wrapper.find('.gl-responsive-table-row').exists()).toBe(true);
+ });
+
+ it('should render an ID column', () => {
+ expect(wrapper.find('.js-feature-flag-id').exists()).toBe(true);
+ expect(trimText(wrapper.find('.js-feature-flag-id').text())).toEqual('^1');
+ });
+
+ it('should render a status column', () => {
+ const badge = wrapper.find('[data-testid="feature-flag-status-badge"]');
+
+ expect(badge.exists()).toBe(true);
+ expect(trimText(badge.text())).toEqual('Active');
+ });
+
+ it('should render a feature flag column', () => {
+ expect(wrapper.find('.js-feature-flag-title').exists()).toBe(true);
+ expect(trimText(wrapper.find('.feature-flag-name').text())).toEqual('flag name');
+
+ expect(trimText(wrapper.find('.feature-flag-description').text())).toEqual(
+ 'flag description',
+ );
+ });
+
+ it('should render an environments specs column', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(envColumn).toBeDefined();
+ expect(trimText(envColumn.text())).toBe('scope');
+ });
+
+ it('should render an environments specs badge with active class', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('scope');
+ });
+
+ it('should render an actions column', () => {
+ expect(wrapper.find('.table-action-buttons').exists()).toBe(true);
+ expect(wrapper.find('.js-feature-flag-delete-button').exists()).toBe(true);
+ expect(wrapper.find('.js-feature-flag-edit-button').exists()).toBe(true);
+ expect(wrapper.find('.js-feature-flag-edit-button').attributes('href')).toEqual('edit/path');
+ });
+ });
+
+ describe('when active and with an update toggle', () => {
+ let toggle;
+ let spy;
+
+ beforeEach(() => {
+ props.featureFlags[0].update_path = props.featureFlags[0].destroy_path;
+ createWrapper(props);
+ toggle = wrapper.find(GlToggle);
+ spy = mockTracking('_category_', toggle.element, jest.spyOn);
+ });
+
+ it('should have a toggle', () => {
+ expect(toggle.exists()).toBe(true);
+ expect(toggle.props('value')).toBe(true);
+ });
+
+ it('should trigger a toggle event', () => {
+ toggle.vm.$emit('change');
+ const flag = { ...props.featureFlags[0], active: !props.featureFlags[0].active };
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('toggle-flag')).toEqual([[flag]]);
+ });
+ });
+
+ it('should track a click', () => {
+ toggle.trigger('click');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_button', {
+ label: 'feature_flag_toggle',
+ });
+ });
+ });
+
+ describe('with an active scope and a percentage rollout strategy', () => {
+ beforeEach(() => {
+ props.featureFlags[0].scopes[0].rolloutStrategy = ROLLOUT_STRATEGY_PERCENT_ROLLOUT;
+ props.featureFlags[0].scopes[0].rolloutPercentage = '54';
+ createWrapper(props);
+ });
+
+ it('should render an environments specs badge with percentage', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('scope: 54%');
+ });
+ });
+
+ describe('with an inactive scope', () => {
+ beforeEach(() => {
+ props.featureFlags[0].scopes[0].active = false;
+ createWrapper(props);
+ });
+
+ it('should render an environments specs badge with inactive class', () => {
+ const envColumn = wrapper.find('.js-feature-flag-environments');
+
+ expect(trimText(envColumn.find(GlBadge).text())).toBe('scope');
+ });
+ });
+
+ describe('with a new version flag', () => {
+ let badges;
+
+ beforeEach(() => {
+ const newVersionProps = {
+ ...props,
+ featureFlags: [
+ {
+ id: 1,
+ iid: 1,
+ active: true,
+ name: 'flag name',
+ description: 'flag description',
+ destroy_path: 'destroy/path',
+ edit_path: 'edit/path',
+ version: NEW_VERSION_FLAG,
+ scopes: [],
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environment_scope: '*' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50' },
+ scopes: [{ environment_scope: 'production' }, { environment_scope: 'staging' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: { userIds: '1,2,3,4' },
+ scopes: [{ environment_scope: 'review/*' }],
+ },
+ {
+ name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ parameters: {},
+ user_list: { name: 'test list' },
+ scopes: [{ environment_scope: '*' }],
+ },
+ ],
+ },
+ ],
+ };
+ createWrapper(newVersionProps, { provide: { glFeatures: { featureFlagsNewVersion: true } } });
+
+ badges = wrapper.findAll('[data-testid="strategy-badge"]');
+ });
+
+ it('shows All Environments if the environment scope is *', () => {
+ expect(badges.at(0).text()).toContain('All Environments');
+ });
+
+ it('shows the environment scope if another is set', () => {
+ expect(badges.at(1).text()).toContain('production');
+ expect(badges.at(1).text()).toContain('staging');
+ expect(badges.at(2).text()).toContain('review/*');
+ });
+
+ it('shows All Users for the default strategy', () => {
+ expect(badges.at(0).text()).toContain('All Users');
+ });
+
+ it('shows the percent for a percent rollout', () => {
+ expect(badges.at(1).text()).toContain('Percent of users - 50%');
+ });
+
+ it('shows the number of users for users with ID', () => {
+ expect(badges.at(2).text()).toContain('User IDs - 4 users');
+ });
+
+ it('shows the name of a user list for user list', () => {
+ expect(badges.at(3).text()).toContain('User List - test list');
+ });
+ });
+
+ it('renders a feature flag without an iid', () => {
+ delete props.featureFlags[0].iid;
+ createWrapper(props);
+
+ expect(wrapper.find('.js-feature-flag-id').exists()).toBe(true);
+ expect(trimText(wrapper.find('.js-feature-flag-id').text())).toBe('');
+ });
+});
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
new file mode 100644
index 00000000000..451bb4176ef
--- /dev/null
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -0,0 +1,485 @@
+import { uniqueId } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import { GlFormTextarea, GlFormCheckbox, GlButton } from '@gitlab/ui';
+import Api from '~/api';
+import Form from '~/feature_flags/components/form.vue';
+import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
+import Strategy from '~/feature_flags/components/strategy.vue';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ INTERNAL_ID_PREFIX,
+ DEFAULT_PERCENT_ROLLOUT,
+ LEGACY_FLAG,
+ NEW_VERSION_FLAG,
+} from '~/feature_flags/constants';
+import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue';
+import ToggleButton from '~/vue_shared/components/toggle_button.vue';
+import { featureFlag, userList, allUsersStrategy } from '../mock_data';
+
+jest.mock('~/api.js');
+
+describe('feature flag form', () => {
+ let wrapper;
+ const requiredProps = {
+ cancelPath: 'feature_flags',
+ submitText: 'Create',
+ environmentsEndpoint: '/environments.json',
+ projectId: '1',
+ };
+
+ const factory = (props = {}) => {
+ wrapper = shallowMount(Form, {
+ propsData: props,
+ provide: {
+ glFeatures: {
+ featureFlagPermissions: true,
+ featureFlagsNewVersion: true,
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserLists.mockResolvedValue({ data: [] });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render provided submitText', () => {
+ factory(requiredProps);
+
+ expect(wrapper.find('.js-ff-submit').text()).toEqual(requiredProps.submitText);
+ });
+
+ it('should render provided cancelPath', () => {
+ factory(requiredProps);
+
+ expect(wrapper.find('.js-ff-cancel').attributes('href')).toEqual(requiredProps.cancelPath);
+ });
+
+ it('does not render the related issues widget without the featureFlagIssuesEndpoint', () => {
+ factory(requiredProps);
+
+ expect(wrapper.find(RelatedIssuesRoot).exists()).toBe(false);
+ });
+
+ it('renders the related issues widget when the featureFlagIssuesEndpoint is provided', () => {
+ factory({
+ ...requiredProps,
+ featureFlagIssuesEndpoint: '/some/endpoint',
+ });
+
+ expect(wrapper.find(RelatedIssuesRoot).exists()).toBe(true);
+ });
+
+ describe('without provided data', () => {
+ beforeEach(() => {
+ factory(requiredProps);
+ });
+
+ it('should render name input text', () => {
+ expect(wrapper.find('#feature-flag-name').exists()).toBe(true);
+ });
+
+ it('should render description textarea', () => {
+ expect(wrapper.find('#feature-flag-description').exists()).toBe(true);
+ });
+
+ describe('scopes', () => {
+ it('should render scopes table', () => {
+ expect(wrapper.find('.js-scopes-table').exists()).toBe(true);
+ });
+
+ it('should render scopes table with a new row', () => {
+ expect(wrapper.find('.js-add-new-scope').exists()).toBe(true);
+ });
+
+ describe('status toggle', () => {
+ describe('without filled text input', () => {
+ it('should add a new scope with an empty text value and the toggled status', () => {
+ wrapper.find(ToggleButton).vm.$emit('change', true);
+
+ expect(wrapper.vm.formScopes).toHaveLength(1);
+ expect(wrapper.vm.formScopes[0].active).toEqual(true);
+ expect(wrapper.vm.formScopes[0].environmentScope).toEqual('');
+
+ expect(wrapper.vm.newScope).toEqual('');
+ });
+ });
+
+ it('should be disabled if the feature flag is not active', done => {
+ wrapper.setProps({ active: false });
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
+ done();
+ });
+ });
+ });
+ });
+ });
+
+ describe('with provided data', () => {
+ beforeEach(() => {
+ factory({
+ ...requiredProps,
+ name: featureFlag.name,
+ description: featureFlag.description,
+ active: true,
+ version: LEGACY_FLAG,
+ scopes: [
+ {
+ id: 1,
+ active: true,
+ environmentScope: 'scope',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '54',
+ rolloutUserIds: '123',
+ shouldIncludeUserIds: true,
+ },
+ {
+ id: 2,
+ active: true,
+ environmentScope: 'scope',
+ canUpdate: false,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '54',
+ rolloutUserIds: '123',
+ shouldIncludeUserIds: true,
+ },
+ ],
+ });
+ });
+
+ describe('scopes', () => {
+ it('should be possible to remove a scope', () => {
+ expect(wrapper.find('.js-feature-flag-delete').exists()).toEqual(true);
+ });
+
+ it('renders empty row to add a new scope', () => {
+ expect(wrapper.find('.js-add-new-scope').exists()).toEqual(true);
+ });
+
+ it('renders the user id checkbox', () => {
+ expect(wrapper.find(GlFormCheckbox).exists()).toBe(true);
+ });
+
+ it('renders the user id text area', () => {
+ expect(wrapper.find(GlFormTextarea).exists()).toBe(true);
+
+ expect(wrapper.find(GlFormTextarea).vm.value).toBe('123');
+ });
+
+ describe('update scope', () => {
+ describe('on click on toggle', () => {
+ it('should update the scope', () => {
+ wrapper.find(ToggleButton).vm.$emit('change', false);
+
+ expect(wrapper.vm.formScopes[0].active).toBe(false);
+ });
+
+ it('should be disabled if the feature flag is not active', done => {
+ wrapper.setProps({ active: false });
+
+ wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(ToggleButton).props('disabledInput')).toBe(true);
+ done();
+ });
+ });
+ });
+ describe('on strategy change', () => {
+ it('should not include user IDs if All Users is selected', () => {
+ const scope = wrapper.find({ ref: 'scopeRow' });
+ scope.find('select').setValue(ROLLOUT_STRATEGY_ALL_USERS);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(scope.find('#rollout-user-id-0').exists()).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('deleting an existing scope', () => {
+ beforeEach(() => {
+ wrapper.find('.js-delete-scope').vm.$emit('click');
+ });
+
+ it('should add `shouldBeDestroyed` key to the clicked scope', () => {
+ expect(wrapper.vm.formScopes[0].shouldBeDestroyed).toBe(true);
+ });
+
+ it('should not render deleted scopes', () => {
+ expect(wrapper.vm.filteredScopes).toEqual([expect.objectContaining({ id: 2 })]);
+ });
+ });
+
+ describe('deleting a new scope', () => {
+ it('should remove the scope from formScopes', () => {
+ factory({
+ ...requiredProps,
+ name: 'feature_flag_1',
+ description: 'this is a feature flag',
+ scopes: [
+ {
+ environmentScope: 'new_scope',
+ active: false,
+ id: uniqueId(INTERNAL_ID_PREFIX),
+ canUpdate: true,
+ protected: false,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ ],
+ });
+
+ wrapper.find('.js-delete-scope').vm.$emit('click');
+
+ expect(wrapper.vm.formScopes).toEqual([]);
+ });
+ });
+
+ describe('with * scope', () => {
+ beforeEach(() => {
+ factory({
+ ...requiredProps,
+ name: 'feature_flag_1',
+ description: 'this is a feature flag',
+ scopes: [
+ {
+ environmentScope: '*',
+ active: false,
+ canUpdate: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ },
+ ],
+ });
+ });
+
+ it('renders read only name', () => {
+ expect(wrapper.find('.js-scope-all').exists()).toEqual(true);
+ });
+ });
+
+ describe('without permission to update', () => {
+ it('should have the flag name input disabled', () => {
+ const input = wrapper.find('#feature-flag-name');
+
+ expect(input.element.disabled).toBe(true);
+ });
+
+ it('should have the flag description text area disabled', () => {
+ const textarea = wrapper.find('#feature-flag-description');
+
+ expect(textarea.element.disabled).toBe(true);
+ });
+
+ it('should disable the scope that cannot be updated', () => {
+ const row = wrapper.findAll('.gl-responsive-table-row').at(2);
+
+ expect(row.find(EnvironmentsDropdown).vm.disabled).toBe(true);
+ expect(row.find(ToggleButton).vm.disabledInput).toBe(true);
+ expect(row.find('.js-delete-scope').exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('on submit', () => {
+ const selectFirstRolloutStrategyOption = dropdownIndex => {
+ wrapper
+ .findAll('select.js-rollout-strategy')
+ .at(dropdownIndex)
+ .findAll('option')
+ .at(1)
+ .setSelected();
+ };
+
+ beforeEach(() => {
+ factory({
+ ...requiredProps,
+ name: 'feature_flag_1',
+ active: true,
+ description: 'this is a feature flag',
+ scopes: [
+ {
+ id: 1,
+ environmentScope: 'production',
+ canUpdate: true,
+ protected: true,
+ active: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ },
+ ],
+ });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should emit handleSubmit with the updated data', () => {
+ wrapper.find('#feature-flag-name').setValue('feature_flag_2');
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper
+ .find('.js-new-scope-name')
+ .find(EnvironmentsDropdown)
+ .vm.$emit('selectEnvironment', 'review');
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ wrapper
+ .find('.js-add-new-scope')
+ .find(ToggleButton)
+ .vm.$emit('change', true);
+ })
+ .then(() => {
+ wrapper.find(ToggleButton).vm.$emit('change', true);
+ return wrapper.vm.$nextTick();
+ })
+
+ .then(() => {
+ selectFirstRolloutStrategyOption(0);
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ selectFirstRolloutStrategyOption(2);
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ wrapper.find('.js-rollout-percentage').setValue('55');
+
+ return wrapper.vm.$nextTick();
+ })
+ .then(() => {
+ wrapper.find({ ref: 'submitButton' }).vm.$emit('click');
+
+ const data = wrapper.emitted().handleSubmit[0][0];
+
+ expect(data.name).toEqual('feature_flag_2');
+ expect(data.description).toEqual('this is a feature flag');
+ expect(data.active).toBe(true);
+
+ expect(data.scopes).toEqual([
+ {
+ id: 1,
+ active: true,
+ environmentScope: 'production',
+ canUpdate: true,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '55',
+ rolloutUserIds: '',
+ shouldIncludeUserIds: false,
+ },
+ {
+ id: expect.any(String),
+ active: false,
+ environmentScope: 'review',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ },
+ {
+ id: expect.any(String),
+ active: true,
+ environmentScope: '',
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ shouldIncludeUserIds: false,
+ },
+ ]);
+ });
+ });
+ });
+ });
+
+ describe('with strategies', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserLists.mockResolvedValue({ data: [userList] });
+ factory({
+ ...requiredProps,
+ name: featureFlag.name,
+ description: featureFlag.description,
+ active: true,
+ version: NEW_VERSION_FLAG,
+ strategies: [
+ {
+ type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '30' },
+ scopes: [],
+ },
+ {
+ type: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environment_scope: 'review/*' }],
+ },
+ ],
+ });
+ });
+
+ it('should request the user lists on mount', () => {
+ return wrapper.vm.$nextTick(() => {
+ expect(Api.fetchFeatureFlagUserLists).toHaveBeenCalledWith('1');
+ });
+ });
+
+ it('should show the strategy component', () => {
+ const strategy = wrapper.find(Strategy);
+ expect(strategy.exists()).toBe(true);
+ expect(strategy.props('strategy')).toEqual({
+ type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '30' },
+ scopes: [],
+ });
+ });
+
+ it('should show one strategy component per strategy', () => {
+ expect(wrapper.findAll(Strategy)).toHaveLength(2);
+ });
+
+ it('adds an all users strategy when clicking the Add button', () => {
+ wrapper.find(GlButton).vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ const strategies = wrapper.findAll(Strategy);
+
+ expect(strategies).toHaveLength(3);
+ expect(strategies.at(2).props('strategy')).toEqual(allUsersStrategy);
+ });
+ });
+
+ it('should remove a strategy on delete', () => {
+ const strategy = {
+ type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '30' },
+ scopes: [],
+ };
+ wrapper.find(Strategy).vm.$emit('delete');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(Strategy)).toHaveLength(1);
+ expect(wrapper.find(Strategy).props('strategy')).not.toEqual(strategy);
+ });
+ });
+
+ it('should provide the user lists to the strategy', () => {
+ expect(wrapper.find(Strategy).props('userLists')).toEqual([userList]);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
new file mode 100644
index 00000000000..10e9ed4d3bf
--- /dev/null
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -0,0 +1,103 @@
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { GlLoadingIcon, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
+import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
+import axios from '~/lib/utils/axios_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
+
+const TEST_HOST = '/test';
+const TEST_SEARCH = 'production';
+
+describe('New Environments Dropdown', () => {
+ let wrapper;
+ let axiosMock;
+
+ beforeEach(() => {
+ axiosMock = new MockAdapter(axios);
+ wrapper = shallowMount(NewEnvironmentsDropdown, { propsData: { endpoint: TEST_HOST } });
+ });
+
+ afterEach(() => {
+ axiosMock.restore();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('before results', () => {
+ it('should show a loading icon', () => {
+ axiosMock.onGet(TEST_HOST).reply(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ return axios.waitForAll();
+ });
+
+ it('should not show any dropdown items', () => {
+ axiosMock.onGet(TEST_HOST).reply(() => {
+ expect(wrapper.findAll(GlDropdownItem)).toHaveLength(0);
+ });
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ return axios.waitForAll();
+ });
+ });
+
+ describe('with empty results', () => {
+ let item;
+ beforeEach(() => {
+ axiosMock.onGet(TEST_HOST).reply(200, []);
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.find(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
+ return axios
+ .waitForAll()
+ .then(() => wrapper.vm.$nextTick())
+ .then(() => {
+ item = wrapper.find(GlDropdownItem);
+ });
+ });
+
+ it('should display a Create item label', () => {
+ expect(item.text()).toBe('Create production');
+ });
+
+ it('should display that no matching items are found', () => {
+ expect(wrapper.find({ ref: 'noResults' }).exists()).toBe(true);
+ });
+
+ it('should emit a new scope when selected', () => {
+ item.vm.$emit('click');
+ expect(wrapper.emitted('add')).toEqual([[TEST_SEARCH]]);
+ });
+ });
+
+ describe('with results', () => {
+ let items;
+ beforeEach(() => {
+ axiosMock.onGet(TEST_HOST).reply(httpStatusCodes.OK, ['prod', 'production']);
+ wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.find(GlSearchBoxByType).vm.$emit('input', 'prod');
+ return axios.waitForAll().then(() => {
+ items = wrapper.findAll(GlDropdownItem);
+ });
+ });
+
+ it('should display one item per result', () => {
+ expect(items).toHaveLength(2);
+ });
+
+ it('should emit an add if an item is clicked', () => {
+ items.at(0).vm.$emit('click');
+ expect(wrapper.emitted('add')).toEqual([['prod']]);
+ });
+
+ it('should not display a create label', () => {
+ items = items.filter(i => i.text().startsWith('Create'));
+ expect(items).toHaveLength(0);
+ });
+
+ it('should not display a message about no results', () => {
+ expect(wrapper.find({ ref: 'noResults' }).exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
new file mode 100644
index 00000000000..284ba09d7fd
--- /dev/null
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -0,0 +1,145 @@
+import Vuex from 'vuex';
+import { shallowMount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import { GlAlert } from '@gitlab/ui';
+import { TEST_HOST } from 'spec/test_constants';
+import Form from '~/feature_flags/components/form.vue';
+import newModule from '~/feature_flags/store/modules/new';
+import NewFeatureFlag from '~/feature_flags/components/new_feature_flag.vue';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ DEFAULT_PERCENT_ROLLOUT,
+ NEW_FLAG_ALERT,
+} from '~/feature_flags/constants';
+import axios from '~/lib/utils/axios_utils';
+import { allUsersStrategy } from '../mock_data';
+
+const userCalloutId = 'feature_flags_new_version';
+const userCalloutsPath = `${TEST_HOST}/user_callouts`;
+
+describe('New feature flag form', () => {
+ let wrapper;
+
+ const store = new Vuex.Store({
+ modules: {
+ new: newModule,
+ },
+ });
+
+ const factory = (opts = {}) => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ wrapper = shallowMount(NewFeatureFlag, {
+ propsData: {
+ endpoint: `${TEST_HOST}/feature_flags.json`,
+ path: '/feature_flags',
+ environmentsEndpoint: 'environments.json',
+ projectId: '8',
+ showUserCallout: true,
+ userCalloutId,
+ userCalloutsPath,
+ },
+ store,
+ provide: {
+ glFeatures: {
+ featureFlagsNewVersion: true,
+ },
+ },
+ ...opts,
+ });
+ };
+
+ beforeEach(() => {
+ factory();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findAlert = () => wrapper.find(GlAlert);
+
+ describe('with error', () => {
+ it('should render the error', () => {
+ store.dispatch('new/receiveCreateFeatureFlagError', { message: ['The name is required'] });
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find('.alert').exists()).toEqual(true);
+ expect(wrapper.find('.alert').text()).toContain('The name is required');
+ });
+ });
+ });
+
+ it('renders form title', () => {
+ expect(wrapper.find('h3').text()).toEqual('New feature flag');
+ });
+
+ it('should render feature flag form', () => {
+ expect(wrapper.find(Form).exists()).toEqual(true);
+ });
+
+ it('does not render the related issues widget', () => {
+ expect(wrapper.find(Form).props('featureFlagIssuesEndpoint')).toBe('');
+ });
+
+ it('should render default * row', () => {
+ const defaultScope = {
+ id: expect.any(String),
+ environmentScope: '*',
+ active: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ };
+ expect(wrapper.vm.scopes).toEqual([defaultScope]);
+
+ expect(wrapper.find(Form).props('scopes')).toContainEqual(defaultScope);
+ });
+
+ it('should not alert users that feature flags are changing soon', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(false);
+ });
+
+ it('should pass in the project ID', () => {
+ expect(wrapper.find(Form).props('projectId')).toBe('8');
+ });
+
+ it('has an all users strategy by default', () => {
+ const strategies = wrapper.find(Form).props('strategies');
+
+ expect(strategies).toEqual([allUsersStrategy]);
+ });
+
+ describe('without new version flags', () => {
+ beforeEach(() => factory({ provide: { glFeatures: { featureFlagsNewVersion: false } } }));
+
+ it('should alert users that feature flags are changing soon', () => {
+ expect(findAlert().text()).toBe(NEW_FLAG_ALERT);
+ });
+ });
+
+ describe('dismissing new version alert', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onPost(userCalloutsPath, { feature_name: userCalloutId }).reply(200);
+ factory({ provide: { glFeatures: { featureFlagsNewVersion: false } } });
+ findAlert().vm.$emit('dismiss');
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should hide the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should send the dismissal event', () => {
+ expect(mock.history.post.length).toBe(1);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/strategy_spec.js b/spec/frontend/feature_flags/components/strategy_spec.js
new file mode 100644
index 00000000000..8436f1cbe97
--- /dev/null
+++ b/spec/frontend/feature_flags/components/strategy_spec.js
@@ -0,0 +1,320 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlFormSelect, GlFormTextarea, GlFormInput, GlLink, GlToken, GlButton } from '@gitlab/ui';
+import {
+ PERCENT_ROLLOUT_GROUP_ID,
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+} from '~/feature_flags/constants';
+import Strategy from '~/feature_flags/components/strategy.vue';
+import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
+
+import { userList } from '../mock_data';
+
+const provide = {
+ strategyTypeDocsPagePath: 'link-to-strategy-docs',
+ environmentsScopeDocsPath: 'link-scope-docs',
+};
+
+describe('Feature flags strategy', () => {
+ let wrapper;
+
+ const findStrategy = () => wrapper.find('[data-testid="strategy"]');
+ const findDocsLinks = () => wrapper.findAll(GlLink);
+
+ const factory = (
+ opts = {
+ propsData: {
+ strategy: {},
+ index: 0,
+ endpoint: '',
+ userLists: [userList],
+ },
+ provide,
+ },
+ ) => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ wrapper = shallowMount(Strategy, opts);
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ describe('helper links', () => {
+ const propsData = { strategy: {}, index: 0, endpoint: '', userLists: [userList] };
+ factory({ propsData, provide });
+
+ it('should display 2 helper links', () => {
+ const links = findDocsLinks();
+ expect(links.exists()).toBe(true);
+ expect(links.at(0).attributes('href')).toContain('docs');
+ expect(links.at(1).attributes('href')).toContain('docs');
+ });
+ });
+
+ describe.each`
+ name | parameter | value | newValue | input
+ ${ROLLOUT_STRATEGY_ALL_USERS} | ${null} | ${null} | ${null} | ${null}
+ ${ROLLOUT_STRATEGY_PERCENT_ROLLOUT} | ${'percentage'} | ${'50'} | ${'20'} | ${GlFormInput}
+ ${ROLLOUT_STRATEGY_USER_ID} | ${'userIds'} | ${'1,2'} | ${'1,2,3'} | ${GlFormTextarea}
+ `('with strategy $name', ({ name, parameter, value, newValue, input }) => {
+ let propsData;
+ let strategy;
+ beforeEach(() => {
+ const parameters = {};
+ if (parameter !== null) {
+ parameters[parameter] = value;
+ }
+ strategy = { name, parameters };
+ propsData = { strategy, index: 0, endpoint: '' };
+ factory({ propsData, provide });
+ });
+
+ it('should set the select to match the strategy name', () => {
+ expect(wrapper.find(GlFormSelect).attributes('value')).toBe(name);
+ });
+
+ it('should not show inputs for other parameters', () => {
+ [GlFormTextarea, GlFormInput, GlFormSelect]
+ .filter(component => component !== input)
+ .map(component => findStrategy().findAll(component))
+ .forEach(inputWrapper => expect(inputWrapper).toHaveLength(0));
+ });
+
+ if (parameter !== null) {
+ it(`should show the input for ${parameter} with the correct value`, () => {
+ const inputWrapper = findStrategy().find(input);
+ expect(inputWrapper.exists()).toBe(true);
+ expect(inputWrapper.attributes('value')).toBe(value);
+ });
+
+ it(`should emit a change event when altering ${parameter}`, () => {
+ const inputWrapper = findStrategy().find(input);
+ inputWrapper.vm.$emit('input', newValue);
+ expect(wrapper.emitted('change')).toEqual([
+ [{ name, parameters: expect.objectContaining({ [parameter]: newValue }), scopes: [] }],
+ ]);
+ });
+ }
+ });
+
+ describe('with strategy gitlabUserList', () => {
+ let propsData;
+ let strategy;
+ beforeEach(() => {
+ strategy = { name: ROLLOUT_STRATEGY_GITLAB_USER_LIST, userListId: '2', parameters: {} };
+ propsData = { strategy, index: 0, endpoint: '', userLists: [userList] };
+ factory({ propsData, provide });
+ });
+
+ it('should set the select to match the strategy name', () => {
+ expect(wrapper.find(GlFormSelect).attributes('value')).toBe(
+ ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ );
+ });
+
+ it('should not show inputs for other parameters', () => {
+ expect(
+ findStrategy()
+ .find(GlFormTextarea)
+ .exists(),
+ ).toBe(false);
+ expect(
+ findStrategy()
+ .find(GlFormInput)
+ .exists(),
+ ).toBe(false);
+ });
+
+ it('should show the input for userListId with the correct value', () => {
+ const inputWrapper = findStrategy().find(GlFormSelect);
+ expect(inputWrapper.exists()).toBe(true);
+ expect(inputWrapper.attributes('value')).toBe('2');
+ });
+
+ it('should emit a change event when altering the userListId', () => {
+ const inputWrapper = findStrategy().find(GlFormSelect);
+ inputWrapper.vm.$emit('input', '3');
+ inputWrapper.vm.$emit('change', '3');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ name: ROLLOUT_STRATEGY_GITLAB_USER_LIST,
+ userListId: '3',
+ scopes: [],
+ parameters: {},
+ },
+ ],
+ ]);
+ });
+ });
+ });
+
+ describe('with a strategy', () => {
+ describe('with a single environment scope defined', () => {
+ let strategy;
+
+ beforeEach(() => {
+ strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50' },
+ scopes: [{ environmentScope: 'production' }],
+ };
+ const propsData = { strategy, index: 0, endpoint: '' };
+ factory({ propsData, provide });
+ });
+
+ it('should revert to all-environments scope when last scope is removed', () => {
+ const token = wrapper.find(GlToken);
+ token.vm.$emit('close');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlToken)).toHaveLength(0);
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [{ environmentScope: '*' }],
+ },
+ ],
+ ]);
+ });
+ });
+ });
+
+ describe('with an all-environments scope defined', () => {
+ let strategy;
+
+ beforeEach(() => {
+ strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50' },
+ scopes: [{ environmentScope: '*' }],
+ };
+ const propsData = { strategy, index: 0, endpoint: '' };
+ factory({ propsData, provide });
+ });
+
+ it('should change the parameters if a different strategy is chosen', () => {
+ const select = wrapper.find(GlFormSelect);
+ select.vm.$emit('input', ROLLOUT_STRATEGY_ALL_USERS);
+ select.vm.$emit('change', ROLLOUT_STRATEGY_ALL_USERS);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(GlFormInput).exists()).toBe(false);
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [{ environmentScope: '*' }],
+ },
+ ],
+ ]);
+ });
+ });
+
+ it('should display selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlToken)).toHaveLength(1);
+ expect(wrapper.find(GlToken).text()).toBe('production');
+ });
+ });
+
+ it('should display all selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ dropdown.vm.$emit('add', 'staging');
+ return wrapper.vm.$nextTick().then(() => {
+ const tokens = wrapper.findAll(GlToken);
+ expect(tokens).toHaveLength(2);
+ expect(tokens.at(0).text()).toBe('production');
+ expect(tokens.at(1).text()).toBe('staging');
+ });
+ });
+
+ it('should emit selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [
+ { environmentScope: '*', shouldBeDestroyed: true },
+ { environmentScope: 'production' },
+ ],
+ },
+ ],
+ ]);
+ });
+ });
+
+ it('should emit a delete if the delete button is clicked', () => {
+ wrapper.find(GlButton).vm.$emit('click');
+ expect(wrapper.emitted('delete')).toEqual([[]]);
+ });
+ });
+
+ describe('without scopes defined', () => {
+ beforeEach(() => {
+ const strategy = {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50' },
+ scopes: [],
+ };
+ const propsData = { strategy, index: 0, endpoint: '' };
+ factory({ propsData, provide });
+ });
+
+ it('should display selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll(GlToken)).toHaveLength(1);
+ expect(wrapper.find(GlToken).text()).toBe('production');
+ });
+ });
+
+ it('should display all selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ dropdown.vm.$emit('add', 'staging');
+ return wrapper.vm.$nextTick().then(() => {
+ const tokens = wrapper.findAll(GlToken);
+ expect(tokens).toHaveLength(2);
+ expect(tokens.at(0).text()).toBe('production');
+ expect(tokens.at(1).text()).toBe('staging');
+ });
+ });
+
+ it('should emit selected scopes', () => {
+ const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ dropdown.vm.$emit('add', 'production');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('change')).toEqual([
+ [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: { percentage: '50', groupId: PERCENT_ROLLOUT_GROUP_ID },
+ scopes: [{ environmentScope: 'production' }],
+ },
+ ],
+ ]);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/components/user_lists_table_spec.js b/spec/frontend/feature_flags/components/user_lists_table_spec.js
new file mode 100644
index 00000000000..d6ced3be168
--- /dev/null
+++ b/spec/frontend/feature_flags/components/user_lists_table_spec.js
@@ -0,0 +1,98 @@
+import { mount } from '@vue/test-utils';
+import * as timeago from 'timeago.js';
+import { GlModal } from '@gitlab/ui';
+import UserListsTable from '~/feature_flags/components/user_lists_table.vue';
+import { userList } from '../mock_data';
+
+jest.mock('timeago.js', () => ({
+ format: jest.fn().mockReturnValue('2 weeks ago'),
+ register: jest.fn(),
+}));
+
+describe('User Lists Table', () => {
+ let wrapper;
+ let userLists;
+
+ beforeEach(() => {
+ userLists = new Array(5).fill(userList).map((x, i) => ({ ...x, id: i }));
+ wrapper = mount(UserListsTable, {
+ propsData: { userLists },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should display the details of a user list', () => {
+ expect(wrapper.find('[data-testid="ffUserListName"]').text()).toBe(userList.name);
+ expect(wrapper.find('[data-testid="ffUserListIds"]').text()).toBe(
+ userList.user_xids.replace(/,/g, ', '),
+ );
+ expect(wrapper.find('[data-testid="ffUserListTimestamp"]').text()).toBe('created 2 weeks ago');
+ expect(timeago.format).toHaveBeenCalledWith(userList.created_at);
+ });
+
+ it('should set the title for a tooltip on the created stamp', () => {
+ expect(wrapper.find('[data-testid="ffUserListTimestamp"]').attributes('title')).toBe(
+ 'Feb 4, 2020 8:13am GMT+0000',
+ );
+ });
+
+ it('should display a user list entry per user list', () => {
+ const lists = wrapper.findAll('[data-testid="ffUserList"]');
+ expect(lists).toHaveLength(5);
+ lists.wrappers.forEach(list => {
+ expect(list.find('[data-testid="ffUserListName"]').exists()).toBe(true);
+ expect(list.find('[data-testid="ffUserListIds"]').exists()).toBe(true);
+ expect(list.find('[data-testid="ffUserListTimestamp"]').exists()).toBe(true);
+ });
+ });
+
+ describe('edit button', () => {
+ it('should link to the path for the user list', () => {
+ expect(wrapper.find('[data-testid="edit-user-list"]').attributes('href')).toBe(userList.path);
+ });
+ });
+
+ describe('delete button', () => {
+ it('should display the confirmation modal', () => {
+ const modal = wrapper.find(GlModal);
+
+ wrapper.find('[data-testid="delete-user-list"]').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(modal.text()).toContain(`Delete ${userList.name}?`);
+ expect(modal.text()).toContain(`User list ${userList.name} will be removed.`);
+ });
+ });
+ });
+
+ describe('confirmation modal', () => {
+ let modal;
+
+ beforeEach(() => {
+ modal = wrapper.find(GlModal);
+
+ wrapper.find('button').trigger('click');
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should emit delete with list on confirmation', () => {
+ modal.find('[data-testid="modal-confirm"]').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('delete')).toEqual([[userLists[0]]]);
+ });
+ });
+
+ it('should not emit delete with list when not confirmed', () => {
+ modal.find('button').trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('delete')).toBeUndefined();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/mock_data.js b/spec/frontend/feature_flags/mock_data.js
new file mode 100644
index 00000000000..47e4957f208
--- /dev/null
+++ b/spec/frontend/feature_flags/mock_data.js
@@ -0,0 +1,109 @@
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+} from '~/feature_flags/constants';
+
+export const featureFlag = {
+ id: 1,
+ active: true,
+ created_at: '2018-12-12T22:07:31.401Z',
+ updated_at: '2018-12-12T22:07:31.401Z',
+ name: 'test flag',
+ description: 'flag for tests',
+ destroy_path: 'feature_flags/1',
+ update_path: 'feature_flags/1',
+ edit_path: 'feature_flags/1/edit',
+ scopes: [
+ {
+ id: 1,
+ active: true,
+ environment_scope: '*',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ {
+ id: 2,
+ active: false,
+ environment_scope: 'production',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ {
+ id: 3,
+ active: false,
+ environment_scope: 'review/*',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ },
+ ],
+ },
+ {
+ id: 4,
+ active: true,
+ environment_scope: 'development',
+ can_update: true,
+ protected: false,
+ created_at: '2019-01-14T06:41:40.987Z',
+ updated_at: '2019-01-14T06:41:40.987Z',
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ percentage: '86',
+ },
+ },
+ ],
+ },
+ ],
+};
+
+export const getRequestData = {
+ feature_flags: [featureFlag],
+ count: {
+ all: 1,
+ disabled: 1,
+ enabled: 0,
+ },
+};
+
+export const rotateData = { token: 'oP6sCNRqtRHmpy1gw2-F' };
+
+export const userList = {
+ name: 'test_users',
+ user_xids: 'user3,user4,user5',
+ id: 2,
+ iid: 2,
+ project_id: 1,
+ created_at: '2020-02-04T08:13:10.507Z',
+ updated_at: '2020-02-04T08:13:10.507Z',
+ path: '/path/to/user/list',
+ edit_path: '/path/to/user/list/edit',
+};
+
+export const allUsersStrategy = {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ scopes: [],
+};
diff --git a/spec/frontend/feature_flags/store/edit/actions_spec.js b/spec/frontend/feature_flags/store/edit/actions_spec.js
new file mode 100644
index 00000000000..4f20b9713bf
--- /dev/null
+++ b/spec/frontend/feature_flags/store/edit/actions_spec.js
@@ -0,0 +1,334 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import {
+ setEndpoint,
+ setPath,
+ updateFeatureFlag,
+ requestUpdateFeatureFlag,
+ receiveUpdateFeatureFlagSuccess,
+ receiveUpdateFeatureFlagError,
+ fetchFeatureFlag,
+ requestFeatureFlag,
+ receiveFeatureFlagSuccess,
+ receiveFeatureFlagError,
+ toggleActive,
+} from '~/feature_flags/store/modules/edit/actions';
+import state from '~/feature_flags/store/modules/edit/state';
+import {
+ mapStrategiesToRails,
+ mapFromScopesViewModel,
+} from '~/feature_flags/store/modules/helpers';
+import {
+ NEW_VERSION_FLAG,
+ LEGACY_FLAG,
+ ROLLOUT_STRATEGY_ALL_USERS,
+} from '~/feature_flags/constants';
+import * as types from '~/feature_flags/store/modules/edit/mutation_types';
+import axios from '~/lib/utils/axios_utils';
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('Feature flags Edit Module actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe('setEndpoint', () => {
+ it('should commit SET_ENDPOINT mutation', done => {
+ testAction(
+ setEndpoint,
+ 'feature_flags.json',
+ mockedState,
+ [{ type: types.SET_ENDPOINT, payload: 'feature_flags.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setPath', () => {
+ it('should commit SET_PATH mutation', done => {
+ testAction(
+ setPath,
+ '/feature_flags',
+ mockedState,
+ [{ type: types.SET_PATH, payload: '/feature_flags' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('updateFeatureFlag', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagSuccess', done => {
+ const featureFlag = {
+ name: 'feature_flag',
+ description: 'feature flag',
+ scopes: [
+ {
+ id: '1',
+ environmentScope: '*',
+ active: true,
+ shouldBeDestroyed: false,
+ canUpdate: true,
+ protected: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ },
+ ],
+ version: LEGACY_FLAG,
+ active: true,
+ };
+ mock.onPut(mockedState.endpoint, mapFromScopesViewModel(featureFlag)).replyOnce(200);
+
+ testAction(
+ updateFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUpdateFeatureFlag',
+ },
+ {
+ type: 'receiveUpdateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ it('handles new version flags as well', done => {
+ const featureFlag = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: NEW_VERSION_FLAG,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ id: 1,
+ scopes: [{ id: 1, environmentScope: 'environmentScope', shouldBeDestroyed: false }],
+ shouldBeDestroyed: false,
+ },
+ ],
+ };
+ mock.onPut(mockedState.endpoint, mapStrategiesToRails(featureFlag)).replyOnce(200);
+
+ testAction(
+ updateFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUpdateFeatureFlag',
+ },
+ {
+ type: 'receiveUpdateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestUpdateFeatureFlag and receiveUpdateFeatureFlagError', done => {
+ mock.onPut(`${TEST_HOST}/endpoint.json`).replyOnce(500, { message: [] });
+
+ testAction(
+ updateFeatureFlag,
+ {
+ name: 'feature_flag',
+ description: 'feature flag',
+ scopes: [{ environment_scope: '*', active: true }],
+ },
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUpdateFeatureFlag',
+ },
+ {
+ type: 'receiveUpdateFeatureFlagError',
+ payload: { message: [] },
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestUpdateFeatureFlag', () => {
+ it('should commit REQUEST_UPDATE_FEATURE_FLAG mutation', done => {
+ testAction(
+ requestUpdateFeatureFlag,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_UPDATE_FEATURE_FLAG }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUpdateFeatureFlagSuccess', () => {
+ it('should commit RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS mutation', done => {
+ testAction(
+ receiveUpdateFeatureFlagSuccess,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUpdateFeatureFlagError', () => {
+ it('should commit RECEIVE_UPDATE_FEATURE_FLAG_ERROR mutation', done => {
+ testAction(
+ receiveUpdateFeatureFlagError,
+ 'There was an error',
+ mockedState,
+ [{ type: types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR, payload: 'There was an error' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchFeatureFlag', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ it('dispatches requestFeatureFlag and receiveFeatureFlagSuccess', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, { id: 1 });
+
+ testAction(
+ fetchFeatureFlag,
+ { id: 1 },
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlag',
+ },
+ {
+ type: 'receiveFeatureFlagSuccess',
+ payload: { id: 1 },
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ it('dispatches requestFeatureFlag and receiveFeatureFlagError', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
+
+ testAction(
+ fetchFeatureFlag,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlag',
+ },
+ {
+ type: 'receiveFeatureFlagError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestFeatureFlag', () => {
+ it('should commit REQUEST_FEATURE_FLAG mutation', done => {
+ testAction(
+ requestFeatureFlag,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_FEATURE_FLAG }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagSuccess', () => {
+ it('should commit RECEIVE_FEATURE_FLAG_SUCCESS mutation', done => {
+ testAction(
+ receiveFeatureFlagSuccess,
+ { id: 1 },
+ mockedState,
+ [{ type: types.RECEIVE_FEATURE_FLAG_SUCCESS, payload: { id: 1 } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagError', () => {
+ it('should commit RECEIVE_FEATURE_FLAG_ERROR mutation', done => {
+ testAction(
+ receiveFeatureFlagError,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_FEATURE_FLAG_ERROR,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleActive', () => {
+ it('should commit TOGGLE_ACTIVE mutation', done => {
+ testAction(
+ toggleActive,
+ true,
+ mockedState,
+ [{ type: types.TOGGLE_ACTIVE, payload: true }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/edit/mutations_spec.js b/spec/frontend/feature_flags/store/edit/mutations_spec.js
new file mode 100644
index 00000000000..21d4e962b48
--- /dev/null
+++ b/spec/frontend/feature_flags/store/edit/mutations_spec.js
@@ -0,0 +1,150 @@
+import state from '~/feature_flags/store/modules/edit/state';
+import mutations from '~/feature_flags/store/modules/edit/mutations';
+import * as types from '~/feature_flags/store/modules/edit/mutation_types';
+
+describe('Feature flags Edit Module Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('SET_ENDPOINT', () => {
+ it('should set endpoint', () => {
+ mutations[types.SET_ENDPOINT](stateCopy, 'feature_flags.json');
+
+ expect(stateCopy.endpoint).toEqual('feature_flags.json');
+ });
+ });
+
+ describe('SET_PATH', () => {
+ it('should set provided options', () => {
+ mutations[types.SET_PATH](stateCopy, 'feature_flags');
+
+ expect(stateCopy.path).toEqual('feature_flags');
+ });
+ });
+
+ describe('REQUEST_FEATURE_FLAG', () => {
+ it('should set isLoading to true', () => {
+ mutations[types.REQUEST_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.isLoading).toEqual(true);
+ });
+
+ it('should set error to an empty array', () => {
+ mutations[types.REQUEST_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.error).toEqual([]);
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAG_SUCCESS', () => {
+ const data = {
+ name: '*',
+ description: 'All environments',
+ scopes: [{ id: 1 }],
+ iid: 5,
+ version: 'new_version_flag',
+ strategies: [
+ { id: 1, scopes: [{ environment_scope: '*' }], name: 'default', parameters: {} },
+ ],
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAG_SUCCESS](stateCopy, data);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to false', () => {
+ expect(stateCopy.hasError).toEqual(false);
+ });
+
+ it('should set name with the provided one', () => {
+ expect(stateCopy.name).toEqual(data.name);
+ });
+
+ it('should set description with the provided one', () => {
+ expect(stateCopy.description).toEqual(data.description);
+ });
+
+ it('should set scope with the provided one', () => {
+ expect(stateCopy.scope).toEqual(data.scope);
+ });
+
+ it('should set the iid to the provided one', () => {
+ expect(stateCopy.iid).toEqual(data.iid);
+ });
+
+ it('should set the version to the provided one', () => {
+ expect(stateCopy.version).toBe('new_version_flag');
+ });
+
+ it('should set the strategies to the provided one', () => {
+ expect(stateCopy.strategies).toEqual([
+ {
+ id: 1,
+ scopes: [{ environmentScope: '*', shouldBeDestroyed: false }],
+ name: 'default',
+ parameters: {},
+ shouldBeDestroyed: false,
+ },
+ ]);
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAG_ERROR](stateCopy);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_UPDATE_FEATURE_FLAG', () => {
+ beforeEach(() => {
+ mutations[types.REQUEST_UPDATE_FEATURE_FLAG](stateCopy);
+ });
+
+ it('should set isSendingRequest to true', () => {
+ expect(stateCopy.isSendingRequest).toEqual(true);
+ });
+
+ it('should set error to an empty array', () => {
+ expect(stateCopy.error).toEqual([]);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS', () => {
+ it('should set isSendingRequest to false', () => {
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS](stateCopy);
+
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR](stateCopy, {
+ message: ['Name is required'],
+ });
+ });
+
+ it('should set isSendingRequest to false', () => {
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+
+ it('should set error to the given message', () => {
+ expect(stateCopy.error).toEqual(['Name is required']);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/helpers_spec.js b/spec/frontend/feature_flags/store/helpers_spec.js
new file mode 100644
index 00000000000..0bc15ab70aa
--- /dev/null
+++ b/spec/frontend/feature_flags/store/helpers_spec.js
@@ -0,0 +1,514 @@
+import { uniqueId } from 'lodash';
+import {
+ mapToScopesViewModel,
+ mapFromScopesViewModel,
+ createNewEnvironmentScope,
+ mapStrategiesToViewModel,
+ mapStrategiesToRails,
+} from '~/feature_flags/store/modules/helpers';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ ROLLOUT_STRATEGY_USER_ID,
+ PERCENT_ROLLOUT_GROUP_ID,
+ INTERNAL_ID_PREFIX,
+ DEFAULT_PERCENT_ROLLOUT,
+ LEGACY_FLAG,
+ NEW_VERSION_FLAG,
+} from '~/feature_flags/constants';
+
+describe('feature flags helpers spec', () => {
+ describe('mapToScopesViewModel', () => {
+ it('converts the data object from the Rails API into something more usable by Vue', () => {
+ const input = [
+ {
+ id: 3,
+ environment_scope: 'environment_scope',
+ active: true,
+ can_update: true,
+ protected: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ percentage: '56',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+
+ _destroy: true,
+ },
+ ];
+
+ const expected = [
+ expect.objectContaining({
+ id: 3,
+ environmentScope: 'environment_scope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '56',
+ rolloutUserIds: '123, 234',
+ shouldBeDestroyed: true,
+ }),
+ ];
+
+ const actual = mapToScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('returns Boolean properties even when their Rails counterparts were not provided (are `undefined`)', () => {
+ const input = [
+ {
+ id: 3,
+ environment_scope: 'environment_scope',
+ },
+ ];
+
+ const [result] = mapToScopesViewModel(input);
+
+ expect(result).toEqual(
+ expect.objectContaining({
+ active: false,
+ canUpdate: false,
+ protected: false,
+ shouldBeDestroyed: false,
+ }),
+ );
+ });
+
+ it('returns an empty array if null or undefined is provided as a parameter', () => {
+ expect(mapToScopesViewModel(null)).toEqual([]);
+ expect(mapToScopesViewModel(undefined)).toEqual([]);
+ });
+
+ describe('with user IDs per environment', () => {
+ let oldGon;
+
+ beforeEach(() => {
+ oldGon = window.gon;
+ window.gon = { features: { featureFlagsUsersPerEnvironment: true } };
+ });
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
+
+ it('sets the user IDs as a comma separated string', () => {
+ const input = [
+ {
+ id: 3,
+ environment_scope: 'environment_scope',
+ active: true,
+ can_update: true,
+ protected: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ percentage: '56',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+
+ _destroy: true,
+ },
+ ];
+
+ const expected = [
+ {
+ id: 3,
+ environmentScope: 'environment_scope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '56',
+ rolloutUserIds: '123, 234',
+ shouldBeDestroyed: true,
+ shouldIncludeUserIds: true,
+ },
+ ];
+
+ const actual = mapToScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+ });
+ });
+
+ describe('mapFromScopesViewModel', () => {
+ it('converts the object emitted from the Vue component into an object that is in the right format to be submitted to the Rails API', () => {
+ const input = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ scopes: [
+ {
+ id: 4,
+ environmentScope: 'environmentScope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ shouldBeDestroyed: true,
+ shouldIncludeUserIds: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '48',
+ rolloutUserIds: '123, 234',
+ },
+ ],
+ };
+
+ const expected = {
+ operations_feature_flag: {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: LEGACY_FLAG,
+ scopes_attributes: [
+ {
+ id: 4,
+ environment_scope: 'environmentScope',
+ active: true,
+ can_update: true,
+ protected: true,
+ _destroy: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ groupId: PERCENT_ROLLOUT_GROUP_ID,
+ percentage: '48',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+ },
+ ],
+ },
+ };
+
+ const actual = mapFromScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('should strip out internal IDs', () => {
+ const input = {
+ scopes: [{ id: 3 }, { id: uniqueId(INTERNAL_ID_PREFIX) }],
+ };
+
+ const result = mapFromScopesViewModel(input);
+ const [realId, internalId] = result.operations_feature_flag.scopes_attributes;
+
+ expect(realId.id).toBe(3);
+ expect(internalId.id).toBeUndefined();
+ });
+
+ it('returns scopes_attributes as [] if param.scopes is null or undefined', () => {
+ let {
+ operations_feature_flag: { scopes_attributes: actualScopes },
+ } = mapFromScopesViewModel({ scopes: null });
+
+ expect(actualScopes).toEqual([]);
+
+ ({
+ operations_feature_flag: { scopes_attributes: actualScopes },
+ } = mapFromScopesViewModel({ scopes: undefined }));
+
+ expect(actualScopes).toEqual([]);
+ });
+ describe('with user IDs per environment', () => {
+ it('sets the user IDs as a comma separated string', () => {
+ const input = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ scopes: [
+ {
+ id: 4,
+ environmentScope: 'environmentScope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ shouldBeDestroyed: true,
+ rolloutStrategy: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ rolloutPercentage: '48',
+ rolloutUserIds: '123, 234',
+ shouldIncludeUserIds: true,
+ },
+ ],
+ };
+
+ const expected = {
+ operations_feature_flag: {
+ name: 'name',
+ description: 'description',
+ version: LEGACY_FLAG,
+ active: true,
+ scopes_attributes: [
+ {
+ id: 4,
+ environment_scope: 'environmentScope',
+ active: true,
+ can_update: true,
+ protected: true,
+ _destroy: true,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ parameters: {
+ groupId: PERCENT_ROLLOUT_GROUP_ID,
+ percentage: '48',
+ },
+ },
+ {
+ name: ROLLOUT_STRATEGY_USER_ID,
+ parameters: {
+ userIds: '123,234',
+ },
+ },
+ ],
+ },
+ ],
+ },
+ };
+
+ const actual = mapFromScopesViewModel(input);
+
+ expect(actual).toEqual(expected);
+ });
+ });
+ });
+
+ describe('createNewEnvironmentScope', () => {
+ it('should return a new environment scope object populated with the default options', () => {
+ const expected = {
+ environmentScope: '',
+ active: false,
+ id: expect.stringContaining(INTERNAL_ID_PREFIX),
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ };
+
+ const actual = createNewEnvironmentScope();
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('should return a new environment scope object with overrides applied', () => {
+ const overrides = {
+ environmentScope: 'environmentScope',
+ active: true,
+ };
+
+ const expected = {
+ environmentScope: 'environmentScope',
+ active: true,
+ id: expect.stringContaining(INTERNAL_ID_PREFIX),
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: DEFAULT_PERCENT_ROLLOUT,
+ rolloutUserIds: '',
+ };
+
+ const actual = createNewEnvironmentScope(overrides);
+
+ expect(actual).toEqual(expected);
+ });
+
+ it('sets canUpdate and protected when called with featureFlagPermissions=true', () => {
+ expect(createNewEnvironmentScope({}, true)).toEqual(
+ expect.objectContaining({
+ canUpdate: true,
+ protected: false,
+ }),
+ );
+ });
+ });
+
+ describe('mapStrategiesToViewModel', () => {
+ it('should map rails casing to view model casing', () => {
+ expect(
+ mapStrategiesToViewModel([
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ scopes: [
+ {
+ environment_scope: '*',
+ id: '1',
+ },
+ ],
+ },
+ ]),
+ ).toEqual([
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ shouldBeDestroyed: false,
+ scopes: [
+ {
+ shouldBeDestroyed: false,
+ environmentScope: '*',
+ id: '1',
+ },
+ ],
+ },
+ ]);
+ });
+
+ it('inserts spaces between user ids', () => {
+ const strategy = mapStrategiesToViewModel([
+ {
+ id: '1',
+ name: 'userWithId',
+ parameters: { userIds: 'user1,user2,user3' },
+ scopes: [],
+ },
+ ])[0];
+
+ expect(strategy.parameters).toEqual({ userIds: 'user1, user2, user3' });
+ });
+ });
+
+ describe('mapStrategiesToRails', () => {
+ it('should map view model casing to rails casing', () => {
+ expect(
+ mapStrategiesToRails({
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ shouldBeDestroyed: true,
+ scopes: [
+ {
+ environmentScope: '*',
+ id: '1',
+ shouldBeDestroyed: true,
+ },
+ ],
+ },
+ ],
+ }),
+ ).toEqual({
+ operations_feature_flag: {
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies_attributes: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ _destroy: true,
+ scopes_attributes: [
+ {
+ environment_scope: '*',
+ id: '1',
+ _destroy: true,
+ },
+ ],
+ },
+ ],
+ },
+ });
+ });
+
+ it('should insert a default * scope if there are none', () => {
+ expect(
+ mapStrategiesToRails({
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ scopes: [],
+ },
+ ],
+ }),
+ ).toEqual({
+ operations_feature_flag: {
+ name: 'test',
+ description: 'test description',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies_attributes: [
+ {
+ id: '1',
+ name: 'default',
+ parameters: {},
+ scopes_attributes: [
+ {
+ environment_scope: '*',
+ },
+ ],
+ },
+ ],
+ },
+ });
+ });
+
+ it('removes white space between user ids', () => {
+ const result = mapStrategiesToRails({
+ name: 'test',
+ version: NEW_VERSION_FLAG,
+ active: true,
+ strategies: [
+ {
+ id: '1',
+ name: 'userWithId',
+ parameters: { userIds: 'user1, user2, user3' },
+ scopes: [],
+ },
+ ],
+ });
+
+ const strategyAttrs = result.operations_feature_flag.strategies_attributes[0];
+
+ expect(strategyAttrs.parameters).toEqual({ userIds: 'user1,user2,user3' });
+ });
+
+ it('preserves the value of active', () => {
+ const result = mapStrategiesToRails({
+ name: 'test',
+ version: NEW_VERSION_FLAG,
+ active: false,
+ strategies: [],
+ });
+
+ expect(result.operations_feature_flag.active).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/index/actions_spec.js b/spec/frontend/feature_flags/store/index/actions_spec.js
new file mode 100644
index 00000000000..0ada84aed33
--- /dev/null
+++ b/spec/frontend/feature_flags/store/index/actions_spec.js
@@ -0,0 +1,605 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import Api from '~/api';
+import {
+ requestFeatureFlags,
+ receiveFeatureFlagsSuccess,
+ receiveFeatureFlagsError,
+ fetchFeatureFlags,
+ setFeatureFlagsEndpoint,
+ setFeatureFlagsOptions,
+ setInstanceIdEndpoint,
+ setInstanceId,
+ rotateInstanceId,
+ requestRotateInstanceId,
+ receiveRotateInstanceIdSuccess,
+ receiveRotateInstanceIdError,
+ toggleFeatureFlag,
+ updateFeatureFlag,
+ receiveUpdateFeatureFlagSuccess,
+ receiveUpdateFeatureFlagError,
+ requestUserLists,
+ receiveUserListsSuccess,
+ receiveUserListsError,
+ fetchUserLists,
+ deleteUserList,
+ receiveDeleteUserListError,
+ clearAlert,
+} from '~/feature_flags/store/modules/index/actions';
+import { mapToScopesViewModel } from '~/feature_flags/store/modules/helpers';
+import state from '~/feature_flags/store/modules/index/state';
+import * as types from '~/feature_flags/store/modules/index/mutation_types';
+import axios from '~/lib/utils/axios_utils';
+import { getRequestData, rotateData, featureFlag, userList } from '../../mock_data';
+
+jest.mock('~/api.js');
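+// The user list actions below stub the auto-mocked Api module directly; the feature flag and rotate endpoints are stubbed per test with axios-mock-adapter.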
+
+describe('Feature flags actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe('setFeatureFlagsEndpoint', () => {
+ it('should commit SET_FEATURE_FLAGS_ENDPOINT mutation', done => {
+ testAction(
+ setFeatureFlagsEndpoint,
+ 'feature_flags.json',
+ mockedState,
+ [{ type: types.SET_FEATURE_FLAGS_ENDPOINT, payload: 'feature_flags.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setFeatureFlagsOptions', () => {
+ it('should commit SET_FEATURE_FLAGS_OPTIONS mutation', done => {
+ testAction(
+ setFeatureFlagsOptions,
+ { page: '1', scope: 'all' },
+ mockedState,
+ [{ type: types.SET_FEATURE_FLAGS_OPTIONS, payload: { page: '1', scope: 'all' } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setInstanceIdEndpoint', () => {
+ it('should commit SET_INSTANCE_ID_ENDPOINT mutation', done => {
+ testAction(
+ setInstanceIdEndpoint,
+ 'instance_id.json',
+ mockedState,
+ [{ type: types.SET_INSTANCE_ID_ENDPOINT, payload: 'instance_id.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setInstanceId', () => {
+ it('should commit SET_INSTANCE_ID mutation', done => {
+ testAction(
+ setInstanceId,
+ 'test_instance_id',
+ mockedState,
+ [{ type: types.SET_INSTANCE_ID, payload: 'test_instance_id' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchFeatureFlags', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+      it('dispatches requestFeatureFlags and receiveFeatureFlagsSuccess', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`).replyOnce(200, getRequestData, {});
+
+ testAction(
+ fetchFeatureFlags,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlags',
+ },
+ {
+ payload: { data: getRequestData, headers: {} },
+ type: 'receiveFeatureFlagsSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+      it('dispatches requestFeatureFlags and receiveFeatureFlagsError', done => {
+ mock.onGet(`${TEST_HOST}/endpoint.json`, {}).replyOnce(500, {});
+
+ testAction(
+ fetchFeatureFlags,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestFeatureFlags',
+ },
+ {
+ type: 'receiveFeatureFlagsError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestFeatureFlags', () => {
+    it('should commit REQUEST_FEATURE_FLAGS mutation', done => {
+ testAction(
+ requestFeatureFlags,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_FEATURE_FLAGS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagsSuccess', () => {
+ it('should commit RECEIVE_FEATURE_FLAGS_SUCCESS mutation', done => {
+ testAction(
+ receiveFeatureFlagsSuccess,
+ { data: getRequestData, headers: {} },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_FEATURE_FLAGS_SUCCESS,
+ payload: { data: getRequestData, headers: {} },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveFeatureFlagsError', () => {
+ it('should commit RECEIVE_FEATURE_FLAGS_ERROR mutation', done => {
+ testAction(
+ receiveFeatureFlagsError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_FEATURE_FLAGS_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('fetchUserLists', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserLists.mockResolvedValue({ data: [userList], headers: {} });
+ });
+
+ describe('success', () => {
+      it('dispatches requestUserLists and receiveUserListsSuccess', done => {
+ testAction(
+ fetchUserLists,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUserLists',
+ },
+ {
+ payload: { data: [userList], headers: {} },
+ type: 'receiveUserListsSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+      it('dispatches requestUserLists and receiveUserListsError', done => {
+ Api.fetchFeatureFlagUserLists.mockRejectedValue();
+
+ testAction(
+ fetchUserLists,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestUserLists',
+ },
+ {
+ type: 'receiveUserListsError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestUserLists', () => {
+    it('should commit REQUEST_USER_LISTS mutation', done => {
+ testAction(
+ requestUserLists,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_USER_LISTS }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUserListsSuccess', () => {
+ it('should commit RECEIVE_USER_LISTS_SUCCESS mutation', done => {
+ testAction(
+ receiveUserListsSuccess,
+ { data: [userList], headers: {} },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_USER_LISTS_SUCCESS,
+ payload: { data: [userList], headers: {} },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveUserListsError', () => {
+ it('should commit RECEIVE_USER_LISTS_ERROR mutation', done => {
+ testAction(
+ receiveUserListsError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_USER_LISTS_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('rotateInstanceId', () => {
+ let mock;
+
+ beforeEach(() => {
+ mockedState.rotateEndpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+      it('dispatches requestRotateInstanceId and receiveRotateInstanceIdSuccess', done => {
+ mock.onPost(`${TEST_HOST}/endpoint.json`).replyOnce(200, rotateData, {});
+
+ testAction(
+ rotateInstanceId,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestRotateInstanceId',
+ },
+ {
+ payload: { data: rotateData, headers: {} },
+ type: 'receiveRotateInstanceIdSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+      it('dispatches requestRotateInstanceId and receiveRotateInstanceIdError', done => {
+        mock.onPost(`${TEST_HOST}/endpoint.json`).replyOnce(500, {});
+
+ testAction(
+ rotateInstanceId,
+ null,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestRotateInstanceId',
+ },
+ {
+ type: 'receiveRotateInstanceIdError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestRotateInstanceId', () => {
+ it('should commit REQUEST_ROTATE_INSTANCE_ID mutation', done => {
+ testAction(
+ requestRotateInstanceId,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_ROTATE_INSTANCE_ID }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveRotateInstanceIdSuccess', () => {
+ it('should commit RECEIVE_ROTATE_INSTANCE_ID_SUCCESS mutation', done => {
+ testAction(
+ receiveRotateInstanceIdSuccess,
+ { data: rotateData, headers: {} },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_ROTATE_INSTANCE_ID_SUCCESS,
+ payload: { data: rotateData, headers: {} },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveRotateInstanceIdError', () => {
+ it('should commit RECEIVE_ROTATE_INSTANCE_ID_ERROR mutation', done => {
+ testAction(
+ receiveRotateInstanceIdError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_ROTATE_INSTANCE_ID_ERROR }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('toggleFeatureFlag', () => {
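+    // toggleFeatureFlag PUTs to the flag's update_path; a successful response dispatches receiveUpdateFeatureFlagSuccess, a failed one receiveUpdateFeatureFlagError.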
+ let mock;
+
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+ describe('success', () => {
+ it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagSuccess', done => {
+ mock.onPut(featureFlag.update_path).replyOnce(200, featureFlag, {});
+
+ testAction(
+ toggleFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'updateFeatureFlag',
+ payload: featureFlag,
+ },
+ {
+ payload: featureFlag,
+ type: 'receiveUpdateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ describe('error', () => {
+      it('dispatches updateFeatureFlag and receiveUpdateFeatureFlagError', done => {
+ mock.onPut(featureFlag.update_path).replyOnce(500);
+
+ testAction(
+ toggleFeatureFlag,
+ featureFlag,
+ mockedState,
+ [],
+ [
+ {
+ type: 'updateFeatureFlag',
+ payload: featureFlag,
+ },
+ {
+ payload: featureFlag.id,
+ type: 'receiveUpdateFeatureFlagError',
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+ describe('updateFeatureFlag', () => {
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ ...f,
+ scopes: mapToScopesViewModel(f.scopes || []),
+ }));
+ });
+
+ it('commits UPDATE_FEATURE_FLAG with the given flag', done => {
+ testAction(
+ updateFeatureFlag,
+ featureFlag,
+ mockedState,
+ [
+ {
+ type: 'UPDATE_FEATURE_FLAG',
+ payload: featureFlag,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('receiveUpdateFeatureFlagSuccess', () => {
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ ...f,
+ scopes: mapToScopesViewModel(f.scopes || []),
+ }));
+ });
+
+ it('commits RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS with the given flag', done => {
+ testAction(
+ receiveUpdateFeatureFlagSuccess,
+ featureFlag,
+ mockedState,
+ [
+ {
+ type: 'RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS',
+ payload: featureFlag,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('receiveUpdateFeatureFlagError', () => {
+ beforeEach(() => {
+ mockedState.featureFlags = getRequestData.feature_flags.map(f => ({
+ ...f,
+ scopes: mapToScopesViewModel(f.scopes || []),
+ }));
+ });
+
+ it('commits RECEIVE_UPDATE_FEATURE_FLAG_ERROR with the given flag id', done => {
+ testAction(
+ receiveUpdateFeatureFlagError,
+ featureFlag.id,
+ mockedState,
+ [
+ {
+ type: 'RECEIVE_UPDATE_FEATURE_FLAG_ERROR',
+ payload: featureFlag.id,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+ describe('deleteUserList', () => {
+ beforeEach(() => {
+ mockedState.userLists = [userList];
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ Api.deleteFeatureFlagUserList.mockResolvedValue();
+ });
+
+ it('should refresh the user lists', done => {
+ testAction(
+ deleteUserList,
+ userList,
+ mockedState,
+ [],
+ [{ type: 'requestDeleteUserList', payload: userList }, { type: 'fetchUserLists' }],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ Api.deleteFeatureFlagUserList.mockRejectedValue({ response: { data: 'some error' } });
+ });
+
+ it('should dispatch receiveDeleteUserListError', done => {
+ testAction(
+ deleteUserList,
+ userList,
+ mockedState,
+ [],
+ [
+ { type: 'requestDeleteUserList', payload: userList },
+ {
+ type: 'receiveDeleteUserListError',
+ payload: { list: userList, error: 'some error' },
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('receiveDeleteUserListError', () => {
+ it('should commit RECEIVE_DELETE_USER_LIST_ERROR with the given list', done => {
+ testAction(
+ receiveDeleteUserListError,
+ { list: userList, error: 'mock error' },
+ mockedState,
+ [
+ {
+ type: 'RECEIVE_DELETE_USER_LIST_ERROR',
+ payload: { list: userList, error: 'mock error' },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('clearAlert', () => {
+ it('should commit RECEIVE_CLEAR_ALERT', done => {
+ const alertIndex = 3;
+
+ testAction(
+ clearAlert,
+ alertIndex,
+ mockedState,
+ [{ type: 'RECEIVE_CLEAR_ALERT', payload: alertIndex }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/index/mutations_spec.js b/spec/frontend/feature_flags/store/index/mutations_spec.js
new file mode 100644
index 00000000000..5e236fe2222
--- /dev/null
+++ b/spec/frontend/feature_flags/store/index/mutations_spec.js
@@ -0,0 +1,332 @@
+import state from '~/feature_flags/store/modules/index/state';
+import mutations from '~/feature_flags/store/modules/index/mutations';
+import * as types from '~/feature_flags/store/modules/index/mutation_types';
+import { mapToScopesViewModel } from '~/feature_flags/store/modules/helpers';
+import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
+import { getRequestData, rotateData, featureFlag, userList } from '../../mock_data';
+
+describe('Feature flags store Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('SET_FEATURE_FLAGS_ENDPOINT', () => {
+ it('should set endpoint', () => {
+ mutations[types.SET_FEATURE_FLAGS_ENDPOINT](stateCopy, 'feature_flags.json');
+
+ expect(stateCopy.endpoint).toEqual('feature_flags.json');
+ });
+ });
+
+ describe('SET_FEATURE_FLAGS_OPTIONS', () => {
+ it('should set provided options', () => {
+ mutations[types.SET_FEATURE_FLAGS_OPTIONS](stateCopy, { page: '1', scope: 'all' });
+
+ expect(stateCopy.options).toEqual({ page: '1', scope: 'all' });
+ });
+ });
+
+ describe('SET_INSTANCE_ID_ENDPOINT', () => {
+ it('should set provided endpoint', () => {
+ mutations[types.SET_INSTANCE_ID_ENDPOINT](stateCopy, 'rotate_token.json');
+
+ expect(stateCopy.rotateEndpoint).toEqual('rotate_token.json');
+ });
+ });
+
+ describe('SET_INSTANCE_ID', () => {
+ it('should set provided token', () => {
+ mutations[types.SET_INSTANCE_ID](stateCopy, rotateData.token);
+
+ expect(stateCopy.instanceId).toEqual(rotateData.token);
+ });
+ });
+
+ describe('REQUEST_FEATURE_FLAGS', () => {
+ it('should set isLoading to true', () => {
+ mutations[types.REQUEST_FEATURE_FLAGS](stateCopy);
+
+ expect(stateCopy.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAGS_SUCCESS', () => {
+ const headers = {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAGS_SUCCESS](stateCopy, { data: getRequestData, headers });
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to false', () => {
+ expect(stateCopy.hasError).toEqual(false);
+ });
+
+ it('should set featureFlags with the transformed data', () => {
+ const expected = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+
+ expect(stateCopy.featureFlags).toEqual(expected);
+ });
+
+ it('should set count with the given data', () => {
+ expect(stateCopy.count.featureFlags).toEqual(37);
+ });
+
+ it('should set pagination', () => {
+ expect(stateCopy.pageInfo.featureFlags).toEqual(
+ parseIntPagination(normalizeHeaders(headers)),
+ );
+ });
+ });
+
+ describe('RECEIVE_FEATURE_FLAGS_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_FEATURE_FLAGS_ERROR](stateCopy);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_USER_LISTS', () => {
+ it('sets isLoading to true', () => {
+ mutations[types.REQUEST_USER_LISTS](stateCopy);
+ expect(stateCopy.isLoading).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_USER_LISTS_SUCCESS', () => {
+ const headers = {
+ 'x-next-page': '2',
+ 'x-page': '1',
+ 'X-Per-Page': '2',
+ 'X-Prev-Page': '',
+ 'X-TOTAL': '37',
+ 'X-Total-Pages': '5',
+ };
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LISTS_SUCCESS](stateCopy, { data: [userList], headers });
+ });
+
+ it('sets isLoading to false', () => {
+ expect(stateCopy.isLoading).toBe(false);
+ });
+
+ it('sets userLists to the received userLists', () => {
+ expect(stateCopy.userLists).toEqual([userList]);
+ });
+
+    it('sets pagination info for user lists', () => {
+ expect(stateCopy.pageInfo.userLists).toEqual(parseIntPagination(normalizeHeaders(headers)));
+ });
+
+ it('sets the count for user lists', () => {
+ expect(stateCopy.count.userLists).toBe(parseInt(headers['X-TOTAL'], 10));
+ });
+ });
+
+ describe('RECEIVE_USER_LISTS_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LISTS_ERROR](stateCopy);
+ });
+
+ it('should set isLoading to false', () => {
+ expect(stateCopy.isLoading).toEqual(false);
+ });
+
+ it('should set hasError to true', () => {
+ expect(stateCopy.hasError).toEqual(true);
+ });
+ });
+
+ describe('REQUEST_ROTATE_INSTANCE_ID', () => {
+ beforeEach(() => {
+ mutations[types.REQUEST_ROTATE_INSTANCE_ID](stateCopy);
+ });
+
+ it('should set isRotating to true', () => {
+ expect(stateCopy.isRotating).toBe(true);
+ });
+
+ it('should set hasRotateError to false', () => {
+ expect(stateCopy.hasRotateError).toBe(false);
+ });
+ });
+
+ describe('RECEIVE_ROTATE_INSTANCE_ID_SUCCESS', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_ROTATE_INSTANCE_ID_SUCCESS](stateCopy, { data: rotateData });
+ });
+
+ it('should set the instance id to the received data', () => {
+ expect(stateCopy.instanceId).toBe(rotateData.token);
+ });
+
+ it('should set isRotating to false', () => {
+ expect(stateCopy.isRotating).toBe(false);
+ });
+
+ it('should set hasRotateError to false', () => {
+ expect(stateCopy.hasRotateError).toBe(false);
+ });
+ });
+
+ describe('RECEIVE_ROTATE_INSTANCE_ID_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_ROTATE_INSTANCE_ID_ERROR](stateCopy);
+ });
+
+ it('should set isRotating to false', () => {
+ expect(stateCopy.isRotating).toBe(false);
+ });
+
+ it('should set hasRotateError to true', () => {
+ expect(stateCopy.hasRotateError).toBe(true);
+ });
+ });
+
+ describe('UPDATE_FEATURE_FLAG', () => {
+ beforeEach(() => {
+ stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ stateCopy.count = { featureFlags: 1, userLists: 0 };
+
+ mutations[types.UPDATE_FEATURE_FLAG](stateCopy, {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ });
+ });
+
+ it('should update the flag with the matching ID', () => {
+ expect(stateCopy.featureFlags).toEqual([
+ {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ },
+ ]);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS', () => {
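+    // Helper: seeds the store with flags merged with flagState, sets the feature flag count, then applies the success mutation with featureFlag merged with featureFlagUpdateParams.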
+ const runUpdate = (stateCount, flagState, featureFlagUpdateParams) => {
+ stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ ...flagState,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ stateCopy.count.featureFlags = stateCount;
+
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_SUCCESS](stateCopy, {
+ ...featureFlag,
+ ...featureFlagUpdateParams,
+ });
+ };
+
+ it('updates the flag with the matching ID', () => {
+ runUpdate({ all: 1, enabled: 1, disabled: 0 }, { active: true }, { active: false });
+
+ expect(stateCopy.featureFlags).toEqual([
+ {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ },
+ ]);
+ });
+ });
+
+ describe('RECEIVE_UPDATE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ stateCopy.featureFlags = getRequestData.feature_flags.map(flag => ({
+ ...flag,
+ scopes: mapToScopesViewModel(flag.scopes || []),
+ }));
+ stateCopy.count = { enabled: 1, disabled: 0 };
+
+ mutations[types.RECEIVE_UPDATE_FEATURE_FLAG_ERROR](stateCopy, featureFlag.id);
+ });
+
+ it('should update the flag with the matching ID, toggling active', () => {
+ expect(stateCopy.featureFlags).toEqual([
+ {
+ ...featureFlag,
+ scopes: mapToScopesViewModel(featureFlag.scopes || []),
+ active: false,
+ },
+ ]);
+ });
+ });
+
+ describe('REQUEST_DELETE_USER_LIST', () => {
+ beforeEach(() => {
+ stateCopy.userLists = [userList];
+ mutations[types.REQUEST_DELETE_USER_LIST](stateCopy, userList);
+ });
+
+ it('should remove the deleted list', () => {
+ expect(stateCopy.userLists).not.toContain(userList);
+ });
+ });
+
+ describe('RECEIVE_DELETE_USER_LIST_ERROR', () => {
+ beforeEach(() => {
+ stateCopy.userLists = [];
+ mutations[types.RECEIVE_DELETE_USER_LIST_ERROR](stateCopy, {
+ list: userList,
+ error: 'some error',
+ });
+ });
+
+ it('should set isLoading to false and hasError to false', () => {
+ expect(stateCopy.isLoading).toBe(false);
+ expect(stateCopy.hasError).toBe(false);
+ });
+
+ it('should add the user list back to the list of user lists', () => {
+ expect(stateCopy.userLists).toContain(userList);
+ });
+ });
+
+ describe('RECEIVE_CLEAR_ALERT', () => {
+ it('clears the alert', () => {
+ stateCopy.alerts = ['a server error'];
+
+ mutations[types.RECEIVE_CLEAR_ALERT](stateCopy, 0);
+
+ expect(stateCopy.alerts).toEqual([]);
+ });
+
+ it('clears the alert at the specified index', () => {
+ stateCopy.alerts = ['a server error', 'another error', 'final error'];
+
+ mutations[types.RECEIVE_CLEAR_ALERT](stateCopy, 1);
+
+ expect(stateCopy.alerts).toEqual(['a server error', 'final error']);
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/new/actions_spec.js b/spec/frontend/feature_flags/store/new/actions_spec.js
new file mode 100644
index 00000000000..cfcddd9451f
--- /dev/null
+++ b/spec/frontend/feature_flags/store/new/actions_spec.js
@@ -0,0 +1,223 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'spec/test_constants';
+import {
+ setEndpoint,
+ setPath,
+ createFeatureFlag,
+ requestCreateFeatureFlag,
+ receiveCreateFeatureFlagSuccess,
+ receiveCreateFeatureFlagError,
+} from '~/feature_flags/store/modules/new/actions';
+import state from '~/feature_flags/store/modules/new/state';
+import * as types from '~/feature_flags/store/modules/new/mutation_types';
+import {
+ ROLLOUT_STRATEGY_ALL_USERS,
+ ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ LEGACY_FLAG,
+ NEW_VERSION_FLAG,
+} from '~/feature_flags/constants';
+import {
+ mapFromScopesViewModel,
+ mapStrategiesToRails,
+} from '~/feature_flags/store/modules/helpers';
+import axios from '~/lib/utils/axios_utils';
+
+jest.mock('~/lib/utils/url_utility');
+
+describe('Feature flags New Module Actions', () => {
+ let mockedState;
+
+ beforeEach(() => {
+ mockedState = state();
+ });
+
+ describe('setEndpoint', () => {
+ it('should commit SET_ENDPOINT mutation', done => {
+ testAction(
+ setEndpoint,
+ 'feature_flags.json',
+ mockedState,
+ [{ type: types.SET_ENDPOINT, payload: 'feature_flags.json' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setPath', () => {
+ it('should commit SET_PATH mutation', done => {
+ testAction(
+ setPath,
+ '/feature_flags',
+ mockedState,
+ [{ type: types.SET_PATH, payload: '/feature_flags' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('createFeatureFlag', () => {
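+    // Legacy flags are serialized with mapFromScopesViewModel and new-version flags with mapStrategiesToRails before being POSTed; both paths are exercised below.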
+ let mock;
+
+ const actionParams = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: LEGACY_FLAG,
+ scopes: [
+ {
+ id: 1,
+ environmentScope: 'environmentScope',
+ active: true,
+ canUpdate: true,
+ protected: true,
+ shouldBeDestroyed: false,
+ rolloutStrategy: ROLLOUT_STRATEGY_ALL_USERS,
+ rolloutPercentage: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
+ },
+ ],
+ };
+
+ beforeEach(() => {
+ mockedState.endpoint = `${TEST_HOST}/endpoint.json`;
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+      it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagSuccess', done => {
+ const convertedActionParams = mapFromScopesViewModel(actionParams);
+
+ mock.onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams).replyOnce(200);
+
+ testAction(
+ createFeatureFlag,
+ actionParams,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestCreateFeatureFlag',
+ },
+ {
+ type: 'receiveCreateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+
+ it('sends strategies for new style feature flags', done => {
+ const newVersionFlagParams = {
+ name: 'name',
+ description: 'description',
+ active: true,
+ version: NEW_VERSION_FLAG,
+ strategies: [
+ {
+ name: ROLLOUT_STRATEGY_ALL_USERS,
+ parameters: {},
+ id: 1,
+ scopes: [{ id: 1, environmentScope: 'environmentScope', shouldBeDestroyed: false }],
+ shouldBeDestroyed: false,
+ },
+ ],
+ };
+ mock
+ .onPost(`${TEST_HOST}/endpoint.json`, mapStrategiesToRails(newVersionFlagParams))
+ .replyOnce(200);
+
+ testAction(
+ createFeatureFlag,
+ newVersionFlagParams,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestCreateFeatureFlag',
+ },
+ {
+ type: 'receiveCreateFeatureFlagSuccess',
+ },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('error', () => {
+      it('dispatches requestCreateFeatureFlag and receiveCreateFeatureFlagError', done => {
+ const convertedActionParams = mapFromScopesViewModel(actionParams);
+
+ mock
+ .onPost(`${TEST_HOST}/endpoint.json`, convertedActionParams)
+ .replyOnce(500, { message: [] });
+
+ testAction(
+ createFeatureFlag,
+ actionParams,
+ mockedState,
+ [],
+ [
+ {
+ type: 'requestCreateFeatureFlag',
+ },
+ {
+ type: 'receiveCreateFeatureFlagError',
+ payload: { message: [] },
+ },
+ ],
+ done,
+ );
+ });
+ });
+ });
+
+ describe('requestCreateFeatureFlag', () => {
+ it('should commit REQUEST_CREATE_FEATURE_FLAG mutation', done => {
+ testAction(
+ requestCreateFeatureFlag,
+ null,
+ mockedState,
+ [{ type: types.REQUEST_CREATE_FEATURE_FLAG }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveCreateFeatureFlagSuccess', () => {
+ it('should commit RECEIVE_CREATE_FEATURE_FLAG_SUCCESS mutation', done => {
+ testAction(
+ receiveCreateFeatureFlagSuccess,
+ null,
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_CREATE_FEATURE_FLAG_SUCCESS,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('receiveCreateFeatureFlagError', () => {
+ it('should commit RECEIVE_CREATE_FEATURE_FLAG_ERROR mutation', done => {
+ testAction(
+ receiveCreateFeatureFlagError,
+ 'There was an error',
+ mockedState,
+ [{ type: types.RECEIVE_CREATE_FEATURE_FLAG_ERROR, payload: 'There was an error' }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/feature_flags/store/new/mutations_spec.js b/spec/frontend/feature_flags/store/new/mutations_spec.js
new file mode 100644
index 00000000000..95eba96ed72
--- /dev/null
+++ b/spec/frontend/feature_flags/store/new/mutations_spec.js
@@ -0,0 +1,65 @@
+import state from '~/feature_flags/store/modules/new/state';
+import mutations from '~/feature_flags/store/modules/new/mutations';
+import * as types from '~/feature_flags/store/modules/new/mutation_types';
+
+describe('Feature flags New Module Mutations', () => {
+ let stateCopy;
+
+ beforeEach(() => {
+ stateCopy = state();
+ });
+
+ describe('SET_ENDPOINT', () => {
+ it('should set endpoint', () => {
+ mutations[types.SET_ENDPOINT](stateCopy, 'feature_flags.json');
+
+ expect(stateCopy.endpoint).toEqual('feature_flags.json');
+ });
+ });
+
+ describe('SET_PATH', () => {
+    it('should set provided path', () => {
+ mutations[types.SET_PATH](stateCopy, 'feature_flags');
+
+ expect(stateCopy.path).toEqual('feature_flags');
+ });
+ });
+
+ describe('REQUEST_CREATE_FEATURE_FLAG', () => {
+ it('should set isSendingRequest to true', () => {
+ mutations[types.REQUEST_CREATE_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.isSendingRequest).toEqual(true);
+ });
+
+ it('should set error to an empty array', () => {
+ mutations[types.REQUEST_CREATE_FEATURE_FLAG](stateCopy);
+
+ expect(stateCopy.error).toEqual([]);
+ });
+ });
+
+ describe('RECEIVE_CREATE_FEATURE_FLAG_SUCCESS', () => {
+ it('should set isSendingRequest to false', () => {
+ mutations[types.RECEIVE_CREATE_FEATURE_FLAG_SUCCESS](stateCopy);
+
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_CREATE_FEATURE_FLAG_ERROR', () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_CREATE_FEATURE_FLAG_ERROR](stateCopy, {
+ message: ['Name is required'],
+ });
+ });
+
+ it('should set isSendingRequest to false', () => {
+ expect(stateCopy.isSendingRequest).toEqual(false);
+ });
+
+    it('should set error to the received messages', () => {
+ expect(stateCopy.error).toEqual(['Name is required']);
+ });
+ });
+});
diff --git a/spec/frontend/fixtures/releases.rb b/spec/frontend/fixtures/releases.rb
new file mode 100644
index 00000000000..bda62f4850a
--- /dev/null
+++ b/spec/frontend/fixtures/releases.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Releases (JavaScript fixtures)' do
+ include ApiHelpers
+ include JavaScriptFixturesHelpers
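+  # Each example below is captured as a frontend fixture; the example description doubles as the generated fixture's file path.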
+
+ let_it_be(:admin) { create(:admin, username: 'administrator', email: 'admin@example.gitlab.com') }
+ let_it_be(:namespace) { create(:namespace, path: 'releases-namespace') }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace, path: 'releases-project') }
+
+ let_it_be(:milestone_12_3) do
+ create(:milestone,
+ id: 123,
+ project: project,
+ title: '12.3',
+ description: 'The 12.3 milestone',
+ start_date: Time.zone.parse('2018-12-10'),
+ due_date: Time.zone.parse('2019-01-10'))
+ end
+
+ let_it_be(:milestone_12_4) do
+ create(:milestone,
+ id: 124,
+ project: project,
+ title: '12.4',
+ description: 'The 12.4 milestone',
+ start_date: Time.zone.parse('2019-01-10'),
+ due_date: Time.zone.parse('2019-02-10'))
+ end
+
+ let_it_be(:open_issues_12_3) do
+ create_list(:issue, 2, milestone: milestone_12_3, project: project)
+ end
+
+ let_it_be(:closed_issues_12_3) do
+ create_list(:issue, 3, :closed, milestone: milestone_12_3, project: project)
+ end
+
+ let_it_be(:open_issues_12_4) do
+ create_list(:issue, 3, milestone: milestone_12_4, project: project)
+ end
+
+ let_it_be(:closed_issues_12_4) do
+ create_list(:issue, 1, :closed, milestone: milestone_12_4, project: project)
+ end
+
+ let_it_be(:release) do
+ create(:release,
+ milestones: [milestone_12_3, milestone_12_4],
+ project: project,
+ tag: 'v1.1',
+ name: 'The first release',
+ author: admin,
+ description: 'Best. Release. **Ever.** :rocket:',
+           created_at: Time.zone.parse('2018-12-03'),
+ released_at: Time.zone.parse('2018-12-10'))
+ end
+
+ let_it_be(:evidence) do
+ create(:evidence,
+ release: release,
+ collected_at: Time.zone.parse('2018-12-03'))
+ end
+
+ let_it_be(:other_link) do
+ create(:release_link,
+ id: 10,
+ release: release,
+ name: 'linux-amd64 binaries',
+ filepath: '/binaries/linux-amd64',
+ url: 'https://downloads.example.com/bin/gitlab-linux-amd64')
+ end
+
+ let_it_be(:runbook_link) do
+ create(:release_link,
+ id: 11,
+ release: release,
+ name: 'Runbook',
+ url: "#{release.project.web_url}/runbook",
+ link_type: :runbook)
+ end
+
+ let_it_be(:package_link) do
+ create(:release_link,
+ id: 12,
+ release: release,
+ name: 'Package',
+ url: 'https://example.com/package',
+ link_type: :package)
+ end
+
+ let_it_be(:image_link) do
+ create(:release_link,
+ id: 13,
+ release: release,
+ name: 'Image',
+ url: 'https://example.com/image',
+ link_type: :image)
+ end
+
+ after(:all) do
+ remove_repository(project)
+ end
+
+ describe API::Releases, type: :request do
+ before(:all) do
+ clean_frontend_fixtures('api/releases/')
+ end
+
+ it 'api/releases/release.json' do
+ get api("/projects/#{project.id}/releases/#{release.tag}", admin)
+
+ expect(response).to be_successful
+ end
+ end
+
+ graphql_query_path = 'releases/queries/all_releases.query.graphql'
+
+ describe "~/#{graphql_query_path}", type: :request do
+ include GraphqlHelpers
+
+ before(:all) do
+ clean_frontend_fixtures('graphql/releases/')
+ end
+
+ it "graphql/#{graphql_query_path}.json" do
+ query = File.read(File.join(Rails.root, '/app/assets/javascripts', graphql_query_path))
+
+ post_graphql(query, current_user: admin, variables: { fullPath: project.full_path })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+end
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 6c40b1ba3a7..38a0da95080 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -7,15 +7,228 @@ import GfmAutoComplete, { membersBeforeSave } from 'ee_else_ce/gfm_auto_complete
import { TEST_HOST } from 'helpers/test_constants';
import { getJSONFixture } from 'helpers/fixtures';
+import waitForPromises from 'jest/helpers/wait_for_promises';
+
+import MockAdapter from 'axios-mock-adapter';
+import AjaxCache from '~/lib/utils/ajax_cache';
+import axios from '~/lib/utils/axios_utils';
+
const labelsFixture = getJSONFixture('autocomplete_sources/labels.json');
describe('GfmAutoComplete', () => {
- const gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
- fetchData: () => {},
- });
+ const fetchDataMock = { fetchData: jest.fn() };
+ let gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call(fetchDataMock);
let atwhoInstance;
let sorterValue;
+ let filterValue;
+
+ describe('.typesWithBackendFiltering', () => {
+ it('should contain vulnerabilities', () => {
+ expect(GfmAutoComplete.typesWithBackendFiltering).toContain('vulnerabilities');
+ });
+ });
+
+ describe('DefaultOptions.filter', () => {
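+    // filter delegates to fetchData while the 'loading' placeholder is shown or when the query changes; otherwise it falls back to the default at.js filter callback.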
+ let items;
+
+ beforeEach(() => {
+ jest.spyOn(fetchDataMock, 'fetchData');
+ jest.spyOn($.fn.atwho.default.callbacks, 'filter').mockImplementation(() => {});
+ });
+
+ describe('assets loading', () => {
+ beforeEach(() => {
+ atwhoInstance = { setting: {}, $inputor: 'inputor', at: '+' };
+ items = ['loading'];
+
+ filterValue = gfmAutoCompleteCallbacks.filter.call(atwhoInstance, '', items);
+ });
+
+ it('should call the fetchData function without query', () => {
+ expect(fetchDataMock.fetchData).toHaveBeenCalledWith('inputor', '+');
+ });
+
+ it('should not call the default atwho filter', () => {
+ expect($.fn.atwho.default.callbacks.filter).not.toHaveBeenCalled();
+ });
+
+ it('should return the passed unfiltered items', () => {
+ expect(filterValue).toEqual(items);
+ });
+ });
+
+ describe('backend filtering', () => {
+ beforeEach(() => {
+ atwhoInstance = { setting: {}, $inputor: 'inputor', at: '+' };
+ items = [];
+ });
+
+ describe('when previous query is different from current one', () => {
+ beforeEach(() => {
+ gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
+ previousQuery: 'oldquery',
+ ...fetchDataMock,
+ });
+ filterValue = gfmAutoCompleteCallbacks.filter.call(atwhoInstance, 'newquery', items);
+ });
+
+ it('should call the fetchData function with query', () => {
+ expect(fetchDataMock.fetchData).toHaveBeenCalledWith('inputor', '+', 'newquery');
+ });
+
+ it('should not call the default atwho filter', () => {
+ expect($.fn.atwho.default.callbacks.filter).not.toHaveBeenCalled();
+ });
+
+ it('should return the passed unfiltered items', () => {
+ expect(filterValue).toEqual(items);
+ });
+ });
+
+ describe('when previous query is not different from current one', () => {
+ beforeEach(() => {
+ gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call({
+ previousQuery: 'oldquery',
+ ...fetchDataMock,
+ });
+ filterValue = gfmAutoCompleteCallbacks.filter.call(
+ atwhoInstance,
+ 'oldquery',
+ items,
+ 'searchKey',
+ );
+ });
+
+ it('should not call the fetchData function', () => {
+ expect(fetchDataMock.fetchData).not.toHaveBeenCalled();
+ });
+
+ it('should call the default atwho filter', () => {
+ expect($.fn.atwho.default.callbacks.filter).toHaveBeenCalledWith(
+ 'oldquery',
+ items,
+ 'searchKey',
+ );
+ });
+ });
+ });
+ });
+
+ describe('fetchData', () => {
+ const { fetchData } = GfmAutoComplete.prototype;
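+    // fetchData is exercised directly via .call() with a minimal context object standing in for a GfmAutoComplete instance.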
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ jest.spyOn(axios, 'get');
+ jest.spyOn(AjaxCache, 'retrieve');
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('already loading data', () => {
+ beforeEach(() => {
+ const context = {
+ isLoadingData: { '+': true },
+ dataSources: {},
+ cachedData: {},
+ };
+ fetchData.call(context, {}, '+', '');
+ });
+
+ it('should not call either axios nor AjaxCache', () => {
+ expect(axios.get).not.toHaveBeenCalled();
+ expect(AjaxCache.retrieve).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('backend filtering', () => {
+ describe('data is not in cache', () => {
+ let context;
+
+ beforeEach(() => {
+ context = {
+ isLoadingData: { '+': false },
+ dataSources: { vulnerabilities: 'vulnerabilities_autocomplete_url' },
+ cachedData: {},
+ };
+ });
+
+ it('should call axios with query', () => {
+ fetchData.call(context, {}, '+', 'query');
+
+ expect(axios.get).toHaveBeenCalledWith('vulnerabilities_autocomplete_url', {
+ params: { search: 'query' },
+ });
+ });
+
+ it.each([200, 500])('should set the loading state', async responseStatus => {
+ mock.onGet('vulnerabilities_autocomplete_url').replyOnce(responseStatus);
+
+ fetchData.call(context, {}, '+', 'query');
+
+ expect(context.isLoadingData['+']).toBe(true);
+
+ await waitForPromises();
+
+ expect(context.isLoadingData['+']).toBe(false);
+ });
+ });
+
+ describe('data is in cache', () => {
+ beforeEach(() => {
+ const context = {
+ isLoadingData: { '+': false },
+ dataSources: { vulnerabilities: 'vulnerabilities_autocomplete_url' },
+ cachedData: { '+': [{}] },
+ };
+ fetchData.call(context, {}, '+', 'query');
+ });
+
+      it('should still call axios with the query, ignoring the cache', () => {
+ expect(axios.get).toHaveBeenCalledWith('vulnerabilities_autocomplete_url', {
+ params: { search: 'query' },
+ });
+ });
+ });
+ });
+
+ describe('frontend filtering', () => {
+ describe('data is not in cache', () => {
+ beforeEach(() => {
+ const context = {
+ isLoadingData: { '#': false },
+ dataSources: { issues: 'issues_autocomplete_url' },
+ cachedData: {},
+ };
+ fetchData.call(context, {}, '#', 'query');
+ });
+
+ it('should call AjaxCache', () => {
+ expect(AjaxCache.retrieve).toHaveBeenCalledWith('issues_autocomplete_url', true);
+ });
+ });
+
+ describe('data is in cache', () => {
+ beforeEach(() => {
+ const context = {
+ isLoadingData: { '#': false },
+ dataSources: { issues: 'issues_autocomplete_url' },
+ cachedData: { '#': [{}] },
+ loadData: () => {},
+ };
+ fetchData.call(context, {}, '#', 'query');
+ });
+
+ it('should not call AjaxCache', () => {
+ expect(AjaxCache.retrieve).not.toHaveBeenCalled();
+ });
+ });
+ });
+ });
describe('DefaultOptions.sorter', () => {
describe('assets loading', () => {
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 0befe1aa192..e880f585daa 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -17,6 +17,7 @@ exports[`grafana integration component default state to match the default snapsh
</h3>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-settings-toggle"
icon=""
@@ -92,20 +93,17 @@ exports[`grafana integration component default state to match the default snapsh
</p>
</gl-form-group-stub>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ icon=""
+ size="medium"
+ variant="success"
>
- <gl-button-stub
- category="primary"
- icon=""
- size="medium"
- variant="success"
- >
-
- Save Changes
- </gl-button-stub>
- </div>
+ Save Changes
+
+ </gl-button-stub>
</form>
</div>
</section>
diff --git a/spec/frontend/groups/components/item_actions_spec.js b/spec/frontend/groups/components/item_actions_spec.js
index f5df8c180d5..d4aa29eaadd 100644
--- a/spec/frontend/groups/components/item_actions_spec.js
+++ b/spec/frontend/groups/components/item_actions_spec.js
@@ -1,84 +1,87 @@
-import Vue from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemActionsComponent from '~/groups/components/item_actions.vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemActions from '~/groups/components/item_actions.vue';
import eventHub from '~/groups/event_hub';
import { mockParentGroupItem, mockChildren } from '../mock_data';
-const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => {
- const Component = Vue.extend(itemActionsComponent);
+describe('ItemActions', () => {
+ let wrapper;
+ const parentGroup = mockChildren[0];
- return mountComponent(Component, {
- group,
+ const defaultProps = {
+ group: mockParentGroupItem,
parentGroup,
- });
-};
-
-describe('ItemActionsComponent', () => {
- let vm;
+ };
- beforeEach(() => {
- vm = createComponent();
- });
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemActions, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
- describe('methods', () => {
- describe('onLeaveGroup', () => {
- it('emits `showLeaveGroupModal` event with `group` and `parentGroup` props', () => {
- jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- vm.onLeaveGroup();
-
- expect(eventHub.$emit).toHaveBeenCalledWith(
- 'showLeaveGroupModal',
- vm.group,
- vm.parentGroup,
- );
- });
- });
- });
+ const findEditGroupBtn = () => wrapper.find('[data-testid="edit-group-btn"]');
+ const findEditGroupIcon = () => findEditGroupBtn().find(GlIcon);
+ const findLeaveGroupBtn = () => wrapper.find('[data-testid="leave-group-btn"]');
+ const findLeaveGroupIcon = () => findLeaveGroupBtn().find(GlIcon);
describe('template', () => {
- it('should render component template correctly', () => {
- expect(vm.$el.classList.contains('controls')).toBeTruthy();
- });
+ it('renders component template correctly', () => {
+ createComponent();
- it('should render Edit Group button with correct attribute values', () => {
- const group = { ...mockParentGroupItem };
- group.canEdit = true;
- const newVm = createComponent(group);
+ expect(wrapper.classes()).toContain('controls');
+ });
- const editBtn = newVm.$el.querySelector('a.edit-group');
+ it('renders "Edit group" button with correct attribute values', () => {
+ const group = {
+ ...mockParentGroupItem,
+ canEdit: true,
+ };
+
+ createComponent({ group });
+
+ expect(findEditGroupBtn().exists()).toBe(true);
+ expect(findEditGroupBtn().classes()).toContain('no-expand');
+ expect(findEditGroupBtn().attributes('href')).toBe(group.editPath);
+ expect(findEditGroupBtn().attributes('aria-label')).toBe('Edit group');
+ expect(findEditGroupBtn().attributes('data-original-title')).toBe('Edit group');
+ expect(findEditGroupIcon().exists()).toBe(true);
+ expect(findEditGroupIcon().props('name')).toBe('settings');
+ });
- expect(editBtn).toBeDefined();
- expect(editBtn.classList.contains('no-expand')).toBeTruthy();
- expect(editBtn.getAttribute('href')).toBe(group.editPath);
- expect(editBtn.getAttribute('aria-label')).toBe('Edit group');
- expect(editBtn.dataset.originalTitle).toBe('Edit group');
- expect(editBtn.querySelectorAll('svg').length).not.toBe(0);
- expect(editBtn.querySelector('svg').getAttribute('data-testid')).toBe('settings-icon');
+ describe('`canLeave` is true', () => {
+ const group = {
+ ...mockParentGroupItem,
+ canLeave: true,
+ };
- newVm.$destroy();
- });
+ beforeEach(() => {
+ createComponent({ group });
+ });
- it('should render Leave Group button with correct attribute values', () => {
- const group = { ...mockParentGroupItem };
- group.canLeave = true;
- const newVm = createComponent(group);
+ it('renders "Leave this group" button with correct attribute values', () => {
+ expect(findLeaveGroupBtn().exists()).toBe(true);
+ expect(findLeaveGroupBtn().classes()).toContain('no-expand');
+ expect(findLeaveGroupBtn().attributes('href')).toBe(group.leavePath);
+ expect(findLeaveGroupBtn().attributes('aria-label')).toBe('Leave this group');
+ expect(findLeaveGroupBtn().attributes('data-original-title')).toBe('Leave this group');
+ expect(findLeaveGroupIcon().exists()).toBe(true);
+ expect(findLeaveGroupIcon().props('name')).toBe('leave');
+ });
- const leaveBtn = newVm.$el.querySelector('a.leave-group');
+ it('emits event on "Leave this group" button click', () => {
+ jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- expect(leaveBtn).toBeDefined();
- expect(leaveBtn.classList.contains('no-expand')).toBeTruthy();
- expect(leaveBtn.getAttribute('href')).toBe(group.leavePath);
- expect(leaveBtn.getAttribute('aria-label')).toBe('Leave this group');
- expect(leaveBtn.dataset.originalTitle).toBe('Leave this group');
- expect(leaveBtn.querySelectorAll('svg').length).not.toBe(0);
- expect(leaveBtn.querySelector('svg').getAttribute('data-testid')).toBe('leave-icon');
+ findLeaveGroupBtn().trigger('click');
- newVm.$destroy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('showLeaveGroupModal', group, parentGroup);
+ });
});
});
});
diff --git a/spec/frontend/groups/components/item_caret_spec.js b/spec/frontend/groups/components/item_caret_spec.js
index 4ff7482414c..b2915607a06 100644
--- a/spec/frontend/groups/components/item_caret_spec.js
+++ b/spec/frontend/groups/components/item_caret_spec.js
@@ -1,38 +1,48 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemCaret from '~/groups/components/item_caret.vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemCaretComponent from '~/groups/components/item_caret.vue';
+describe('ItemCaret', () => {
+ let wrapper;
-const createComponent = (isGroupOpen = false) => {
- const Component = Vue.extend(itemCaretComponent);
+ const defaultProps = {
+ isGroupOpen: false,
+ };
- return mountComponent(Component, {
- isGroupOpen,
- });
-};
-
-describe('ItemCaretComponent', () => {
- let vm;
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemCaret, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
+ const findAllGlIcons = () => wrapper.findAll(GlIcon);
+ const findGlIcon = () => wrapper.find(GlIcon);
+
describe('template', () => {
- it('should render component template correctly', () => {
- vm = createComponent();
- expect(vm.$el.classList.contains('folder-caret')).toBeTruthy();
- expect(vm.$el.querySelectorAll('svg').length).toBe(1);
- });
+ it('renders component template correctly', () => {
+ createComponent();
- it('should render caret down icon if `isGroupOpen` prop is `true`', () => {
- vm = createComponent(true);
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('angle-down-icon');
+ expect(wrapper.classes()).toContain('folder-caret');
+ expect(findAllGlIcons()).toHaveLength(1);
});
- it('should render caret right icon if `isGroupOpen` prop is `false`', () => {
- vm = createComponent();
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('angle-right-icon');
+ it.each`
+ isGroupOpen | icon
+ ${true} | ${'angle-down'}
+ ${false} | ${'angle-right'}
+ `('renders "$icon" icon when `isGroupOpen` is $isGroupOpen', ({ isGroupOpen, icon }) => {
+ createComponent({
+ isGroupOpen,
+ });
+
+ expect(findGlIcon().props('name')).toBe(icon);
});
});
});
diff --git a/spec/frontend/groups/components/item_stats_spec.js b/spec/frontend/groups/components/item_stats_spec.js
index 771643609ec..d8c88a608ac 100644
--- a/spec/frontend/groups/components/item_stats_spec.js
+++ b/spec/frontend/groups/components/item_stats_spec.js
@@ -1,119 +1,50 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import ItemStats from '~/groups/components/item_stats.vue';
+import ItemStatsValue from '~/groups/components/item_stats_value.vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemStatsComponent from '~/groups/components/item_stats.vue';
-import {
- mockParentGroupItem,
- ITEM_TYPE,
- VISIBILITY_TYPE_ICON,
- GROUP_VISIBILITY_TYPE,
- PROJECT_VISIBILITY_TYPE,
-} from '../mock_data';
+import { mockParentGroupItem, ITEM_TYPE } from '../mock_data';
-const createComponent = (item = mockParentGroupItem) => {
- const Component = Vue.extend(itemStatsComponent);
+describe('ItemStats', () => {
+ let wrapper;
- return mountComponent(Component, {
- item,
- });
-};
-
-describe('ItemStatsComponent', () => {
- describe('computed', () => {
- describe('visibilityIcon', () => {
- it('should return icon class based on `item.visibility` value', () => {
- Object.keys(VISIBILITY_TYPE_ICON).forEach(visibility => {
- const item = { ...mockParentGroupItem, visibility };
- const vm = createComponent(item);
+ const defaultProps = {
+ item: mockParentGroupItem,
+ };
- expect(vm.visibilityIcon).toBe(VISIBILITY_TYPE_ICON[visibility]);
- vm.$destroy();
- });
- });
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemStats, {
+ propsData: { ...defaultProps, ...props },
});
+ };
- describe('visibilityTooltip', () => {
- it('should return tooltip string for Group based on `item.visibility` value', () => {
- Object.keys(GROUP_VISIBILITY_TYPE).forEach(visibility => {
- const item = { ...mockParentGroupItem, visibility, type: ITEM_TYPE.GROUP };
- const vm = createComponent(item);
-
- expect(vm.visibilityTooltip).toBe(GROUP_VISIBILITY_TYPE[visibility]);
- vm.$destroy();
- });
- });
-
- it('should return tooltip string for Project based on `item.visibility` value', () => {
- Object.keys(PROJECT_VISIBILITY_TYPE).forEach(visibility => {
- const item = { ...mockParentGroupItem, visibility, type: ITEM_TYPE.PROJECT };
- const vm = createComponent(item);
-
- expect(vm.visibilityTooltip).toBe(PROJECT_VISIBILITY_TYPE[visibility]);
- vm.$destroy();
- });
- });
- });
-
- describe('isProject', () => {
- it('should return boolean value representing whether `item.type` is Project or not', () => {
- let item;
- let vm;
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.PROJECT };
- vm = createComponent(item);
-
- expect(vm.isProject).toBeTruthy();
- vm.$destroy();
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.GROUP };
- vm = createComponent(item);
-
- expect(vm.isProject).toBeFalsy();
- vm.$destroy();
- });
- });
-
- describe('isGroup', () => {
- it('should return boolean value representing whether `item.type` is Group or not', () => {
- let item;
- let vm;
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.GROUP };
- vm = createComponent(item);
-
- expect(vm.isGroup).toBeTruthy();
- vm.$destroy();
-
- item = { ...mockParentGroupItem, type: ITEM_TYPE.PROJECT };
- vm = createComponent(item);
-
- expect(vm.isGroup).toBeFalsy();
- vm.$destroy();
- });
- });
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
+ const findItemStatsValue = () => wrapper.find(ItemStatsValue);
+
describe('template', () => {
it('renders component container element correctly', () => {
- const vm = createComponent();
+ createComponent();
- expect(vm.$el.classList.contains('stats')).toBeTruthy();
-
- vm.$destroy();
+ expect(wrapper.classes()).toContain('stats');
});
it('renders start count and last updated information for project item correctly', () => {
- const item = { ...mockParentGroupItem, type: ITEM_TYPE.PROJECT, starCount: 4 };
- const vm = createComponent(item);
-
- const projectStarIconEl = vm.$el.querySelector('.project-stars');
+ const item = {
+ ...mockParentGroupItem,
+ type: ITEM_TYPE.PROJECT,
+ starCount: 4,
+ };
- expect(projectStarIconEl).not.toBeNull();
- expect(projectStarIconEl.querySelectorAll('svg').length).toBeGreaterThan(0);
- expect(projectStarIconEl.querySelectorAll('.stat-value').length).toBeGreaterThan(0);
- expect(vm.$el.querySelectorAll('.last-updated').length).toBeGreaterThan(0);
+ createComponent({ item });
- vm.$destroy();
+ expect(findItemStatsValue().exists()).toBe(true);
+ expect(findItemStatsValue().props('cssClass')).toBe('project-stars');
+ expect(wrapper.contains('.last-updated')).toBe(true);
});
});
});
diff --git a/spec/frontend/groups/components/item_stats_value_spec.js b/spec/frontend/groups/components/item_stats_value_spec.js
index 11246390444..6f018aa79a0 100644
--- a/spec/frontend/groups/components/item_stats_value_spec.js
+++ b/spec/frontend/groups/components/item_stats_value_spec.js
@@ -1,82 +1,67 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemStatsValue from '~/groups/components/item_stats_value.vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemStatsValueComponent from '~/groups/components/item_stats_value.vue';
+describe('ItemStatsValue', () => {
+ let wrapper;
-const createComponent = ({ title, cssClass, iconName, tooltipPlacement, value }) => {
- const Component = Vue.extend(itemStatsValueComponent);
+ const defaultProps = {
+ title: 'Subgroups',
+ cssClass: 'number-subgroups',
+ iconName: 'folder',
+ tooltipPlacement: 'left',
+ };
- return mountComponent(Component, {
- title,
- cssClass,
- iconName,
- tooltipPlacement,
- value,
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemStatsValue, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
-};
-describe('ItemStatsValueComponent', () => {
- describe('computed', () => {
- let vm;
- const itemConfig = {
- title: 'Subgroups',
- cssClass: 'number-subgroups',
- iconName: 'folder',
- tooltipPlacement: 'left',
- };
+ const findGlIcon = () => wrapper.find(GlIcon);
+ const findStatValue = () => wrapper.find('[data-testid="itemStatValue"]');
- describe('isValuePresent', () => {
- it('returns true if non-empty `value` is present', () => {
- vm = createComponent({ ...itemConfig, value: 10 });
+ describe('template', () => {
+ describe('when `value` is not provided', () => {
+ it('does not render value count', () => {
+ createComponent();
- expect(vm.isValuePresent).toBeTruthy();
+ expect(findStatValue().exists()).toBe(false);
});
+ });
- it('returns false if empty `value` is present', () => {
- vm = createComponent(itemConfig);
-
- expect(vm.isValuePresent).toBeFalsy();
+ describe('when `value` is provided', () => {
+ beforeEach(() => {
+ createComponent({
+ value: 10,
+ });
});
- afterEach(() => {
- vm.$destroy();
+ it('renders component element correctly', () => {
+ expect(wrapper.classes()).toContain('number-subgroups');
});
- });
- });
- describe('template', () => {
- let vm;
- beforeEach(() => {
- vm = createComponent({
- title: 'Subgroups',
- cssClass: 'number-subgroups',
- iconName: 'folder',
- tooltipPlacement: 'left',
- value: 10,
+ it('renders element tooltip correctly', () => {
+ expect(wrapper.attributes('data-original-title')).toBe('Subgroups');
+ expect(wrapper.attributes('data-placement')).toBe('left');
});
- });
- afterEach(() => {
- vm.$destroy();
- });
-
- it('renders component element correctly', () => {
- expect(vm.$el.classList.contains('number-subgroups')).toBeTruthy();
- expect(vm.$el.querySelectorAll('svg').length).toBeGreaterThan(0);
- expect(vm.$el.querySelectorAll('.stat-value').length).toBeGreaterThan(0);
- });
-
- it('renders element tooltip correctly', () => {
- expect(vm.$el.dataset.originalTitle).toBe('Subgroups');
- expect(vm.$el.dataset.placement).toBe('left');
- });
-
- it('renders element icon correctly', () => {
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('folder-icon');
- });
+ it('renders element icon correctly', () => {
+ expect(findGlIcon().exists()).toBe(true);
+ expect(findGlIcon().props('name')).toBe('folder');
+ });
- it('renders value count correctly', () => {
- expect(vm.$el.querySelector('.stat-value').innerText.trim()).toContain('10');
+ it('renders value count correctly', () => {
+ expect(findStatValue().classes()).toContain('stat-value');
+ expect(findStatValue().text()).toBe('10');
+ });
});
});
});
diff --git a/spec/frontend/groups/components/item_type_icon_spec.js b/spec/frontend/groups/components/item_type_icon_spec.js
index 477c413ddcd..5e7056be218 100644
--- a/spec/frontend/groups/components/item_type_icon_spec.js
+++ b/spec/frontend/groups/components/item_type_icon_spec.js
@@ -1,53 +1,53 @@
-import Vue from 'vue';
-
-import mountComponent from 'helpers/vue_mount_component_helper';
-import itemTypeIconComponent from '~/groups/components/item_type_icon.vue';
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import ItemTypeIcon from '~/groups/components/item_type_icon.vue';
import { ITEM_TYPE } from '../mock_data';
-const createComponent = (itemType = ITEM_TYPE.GROUP, isGroupOpen = false) => {
- const Component = Vue.extend(itemTypeIconComponent);
-
- return mountComponent(Component, {
- itemType,
- isGroupOpen,
- });
-};
+describe('ItemTypeIcon', () => {
+ let wrapper;
-describe('ItemTypeIconComponent', () => {
- describe('template', () => {
- it('should render component template correctly', () => {
- const vm = createComponent();
+ const defaultProps = {
+ itemType: ITEM_TYPE.GROUP,
+ isGroupOpen: false,
+ };
- expect(vm.$el.classList.contains('item-type-icon')).toBeTruthy();
- vm.$destroy();
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ItemTypeIcon, {
+ propsData: { ...defaultProps, ...props },
});
+ };
- it('should render folder open or close icon based `isGroupOpen` prop value', () => {
- let vm;
-
- vm = createComponent(ITEM_TYPE.GROUP, true);
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('folder-open-icon');
- vm.$destroy();
+ const findGlIcon = () => wrapper.find(GlIcon);
- vm = createComponent(ITEM_TYPE.GROUP);
+ describe('template', () => {
+ it('renders component template correctly', () => {
+ createComponent();
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('folder-o-icon');
- vm.$destroy();
+ expect(wrapper.classes()).toContain('item-type-icon');
});
- it('should render bookmark icon based on `isProject` prop value', () => {
- let vm;
-
- vm = createComponent(ITEM_TYPE.PROJECT);
-
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).toBe('bookmark-icon');
- vm.$destroy();
-
- vm = createComponent(ITEM_TYPE.GROUP);
-
- expect(vm.$el.querySelector('svg').getAttribute('data-testid')).not.toBe('bookmark-icon');
- vm.$destroy();
- });
+ it.each`
+ type | isGroupOpen | icon
+ ${ITEM_TYPE.GROUP} | ${true} | ${'folder-open'}
+ ${ITEM_TYPE.GROUP} | ${false} | ${'folder-o'}
+ ${ITEM_TYPE.PROJECT} | ${true} | ${'bookmark'}
+ ${ITEM_TYPE.PROJECT} | ${false} | ${'bookmark'}
+ `(
+ 'shows "$icon" icon when `itemType` is "$type" and `isGroupOpen` is $isGroupOpen',
+ ({ type, isGroupOpen, icon }) => {
+ createComponent({
+ itemType: type,
+ isGroupOpen,
+ });
+ expect(findGlIcon().props('name')).toBe(icon);
+ },
+ );
});
});
diff --git a/spec/frontend/groups/members/index_spec.js b/spec/frontend/groups/members/index_spec.js
index 70fce0d60fb..95a111ef5da 100644
--- a/spec/frontend/groups/members/index_spec.js
+++ b/spec/frontend/groups/members/index_spec.js
@@ -1,5 +1,5 @@
import { createWrapper } from '@vue/test-utils';
-import initGroupMembersApp from '~/groups/members';
+import { initGroupMembersApp } from '~/groups/members';
import GroupMembersApp from '~/groups/members/components/app.vue';
import { membersJsonString, membersParsed } from './mock_data';
@@ -9,7 +9,7 @@ describe('initGroupMembersApp', () => {
let wrapper;
const setup = () => {
- vm = initGroupMembersApp(el);
+ vm = initGroupMembersApp(el, ['account']);
wrapper = createWrapper(vm);
};
@@ -63,4 +63,10 @@ describe('initGroupMembersApp', () => {
expect(vm.$store.state.members).toEqual(membersParsed);
});
+
+ it('sets `tableFields` in Vuex store', () => {
+ setup();
+
+ expect(vm.$store.state.tableFields).toEqual(['account']);
+ });
});
diff --git a/spec/frontend/helpers/experimentation_helper.js b/spec/frontend/helpers/experimentation_helper.js
new file mode 100644
index 00000000000..c08c25155e8
--- /dev/null
+++ b/spec/frontend/helpers/experimentation_helper.js
@@ -0,0 +1,14 @@
+import { merge } from 'lodash';
+
+export function withGonExperiment(experimentKey, value = true) {
+ let origGon;
+
+ beforeEach(() => {
+ origGon = window.gon;
+ window.gon = merge({}, window.gon || {}, { experiments: { [experimentKey]: value } });
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
+ });
+}
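
A hypothetical usage sketch for the `withGonExperiment` helper added above (the `fooBarExperiment` key and the assertion are illustrative assumptions, not part of this diff): calling it at `describe` level registers the gon setup and teardown hooks for every test in that block.

import { withGonExperiment } from 'helpers/experimentation_helper';

describe('some experiment-aware behaviour', () => {
  // Stubs window.gon.experiments.fooBarExperiment = true before each test
  // and restores the original window.gon afterwards.
  withGonExperiment('fooBarExperiment');

  it('exposes the experiment flag through gon', () => {
    expect(window.gon.experiments.fooBarExperiment).toBe(true);
  });
});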
diff --git a/spec/frontend/helpers/keep_alive_component_helper.js b/spec/frontend/helpers/keep_alive_component_helper.js
new file mode 100644
index 00000000000..54f40bf9093
--- /dev/null
+++ b/spec/frontend/helpers/keep_alive_component_helper.js
@@ -0,0 +1,29 @@
+import Vue from 'vue';
+
+export function keepAlive(KeptAliveComponent) {
+ return Vue.extend({
+ components: {
+ KeptAliveComponent,
+ },
+ data() {
+ return {
+ view: 'KeptAliveComponent',
+ };
+ },
+ methods: {
+ async activate() {
+ this.view = 'KeptAliveComponent';
+ await this.$nextTick();
+ },
+ async deactivate() {
+ this.view = 'div';
+ await this.$nextTick();
+ },
+ async reactivate() {
+ await this.deactivate();
+ await this.activate();
+ },
+ },
+ template: `<keep-alive><component :is="view"></component></keep-alive>`,
+ });
+}
diff --git a/spec/frontend/helpers/keep_alive_component_helper_spec.js b/spec/frontend/helpers/keep_alive_component_helper_spec.js
new file mode 100644
index 00000000000..dcccc14f396
--- /dev/null
+++ b/spec/frontend/helpers/keep_alive_component_helper_spec.js
@@ -0,0 +1,32 @@
+import { mount } from '@vue/test-utils';
+import { keepAlive } from './keep_alive_component_helper';
+
+const component = {
+ template: '<div>Test Component</div>',
+};
+
+describe('keepAlive', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = mount(keepAlive(component));
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('converts a component to a keep-alive component', async () => {
+ const { element } = wrapper.find(component);
+
+ await wrapper.vm.deactivate();
+ expect(wrapper.find(component).exists()).toBe(false);
+
+ await wrapper.vm.activate();
+
+    // assert that when the component is deactivated and then reactivated, the
+    // re-rendered component keeps the same element reference as before
+    // (i.e. it was kept alive rather than destroyed and re-created)
+ expect(wrapper.find(component).element).toBe(element);
+ });
+});
diff --git a/spec/frontend/helpers/local_storage_helper.js b/spec/frontend/helpers/local_storage_helper.js
index cd39b660bfd..0318b80aaef 100644
--- a/spec/frontend/helpers/local_storage_helper.js
+++ b/spec/frontend/helpers/local_storage_helper.js
@@ -35,7 +35,7 @@ export const createLocalStorageSpy = () => {
clear: jest.fn(() => {
storage = {};
}),
- getItem: jest.fn(key => storage[key]),
+ getItem: jest.fn(key => (key in storage ? storage[key] : null)),
setItem: jest.fn((key, value) => {
storage[key] = value;
}),
diff --git a/spec/frontend/helpers/local_storage_helper_spec.js b/spec/frontend/helpers/local_storage_helper_spec.js
index 6b44ea3a4c3..5d9961e7631 100644
--- a/spec/frontend/helpers/local_storage_helper_spec.js
+++ b/spec/frontend/helpers/local_storage_helper_spec.js
@@ -18,11 +18,11 @@ describe('localStorage helper', () => {
localStorage.removeItem('test', 'testing');
- expect(localStorage.getItem('test')).toBeUndefined();
+ expect(localStorage.getItem('test')).toBe(null);
expect(localStorage.getItem('test2')).toBe('testing');
localStorage.clear();
- expect(localStorage.getItem('test2')).toBeUndefined();
+ expect(localStorage.getItem('test2')).toBe(null);
});
});
diff --git a/spec/frontend/helpers/vue_test_utils_helper.js b/spec/frontend/helpers/vue_test_utils_helper.js
index 68326e37ae7..ead898f04d3 100644
--- a/spec/frontend/helpers/vue_test_utils_helper.js
+++ b/spec/frontend/helpers/vue_test_utils_helper.js
@@ -33,3 +33,10 @@ export const waitForMutation = (store, expectedMutationType) =>
}
});
});
+
+export const extendedWrapper = wrapper =>
+ Object.defineProperty(wrapper, 'findByTestId', {
+ value(id) {
+ return this.find(`[data-testid="${id}"]`);
+ },
+ });
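
A brief usage sketch for the `extendedWrapper` decorator above (the component and test id are assumptions for illustration): it augments an existing wrapper with a `findByTestId` shortcut for `data-testid` selectors and returns the same wrapper.

import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';

const SomeComponent = { template: '<div><span data-testid="itemCount">3</span></div>' };

describe('extendedWrapper', () => {
  it('adds a findByTestId shortcut', () => {
    // extendedWrapper decorates the wrapper in place and returns it.
    const wrapper = extendedWrapper(shallowMount(SomeComponent));

    expect(wrapper.findByTestId('itemCount').text()).toBe('3');
  });
});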
diff --git a/spec/frontend/helpers/wait_for_text.js b/spec/frontend/helpers/wait_for_text.js
new file mode 100644
index 00000000000..6bed8a90a98
--- /dev/null
+++ b/spec/frontend/helpers/wait_for_text.js
@@ -0,0 +1,3 @@
+import { findByText } from '@testing-library/dom';
+
+export const waitForText = async (text, container = document) => findByText(container, text);
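
A minimal usage sketch (the message text is an assumption): because the helper delegates to Testing Library's findByText, it resolves with the matching element once the text appears in the container, so callers should await it.

import { waitForText } from 'helpers/wait_for_text';

it('shows a confirmation message', async () => {
  // Resolves once 'Changes saved' is rendered, or rejects after the
  // Testing Library default timeout if it never appears.
  const element = await waitForText('Changes saved');

  expect(element.textContent).toBe('Changes saved');
});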
diff --git a/spec/frontend/ide/components/commit_sidebar/actions_spec.js b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
index a303e2b9bee..0003e13c92f 100644
--- a/spec/frontend/ide/components/commit_sidebar/actions_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/actions_spec.js
@@ -83,12 +83,12 @@ describe('IDE commit sidebar actions', () => {
});
});
- describe('commitToCurrentBranchText', () => {
+ describe('currentBranchText', () => {
it('escapes current branch', () => {
const injectedSrc = '<img src="x" />';
createComponent({ currentBranchId: injectedSrc });
- expect(vm.commitToCurrentBranchText).not.toContain(injectedSrc);
+ expect(vm.currentBranchText).not.toContain(injectedSrc);
});
});
diff --git a/spec/frontend/ide/components/ide_review_spec.js b/spec/frontend/ide/components/ide_review_spec.js
index c9ac2ac423d..bcc98669427 100644
--- a/spec/frontend/ide/components/ide_review_spec.js
+++ b/spec/frontend/ide/components/ide_review_spec.js
@@ -1,14 +1,19 @@
import Vue from 'vue';
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
import IdeReview from '~/ide/components/ide_review.vue';
+import EditorModeDropdown from '~/ide/components/editor_mode_dropdown.vue';
import { createStore } from '~/ide/stores';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { trimText } from '../../helpers/text_helper';
+import { keepAlive } from '../../helpers/keep_alive_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('IDE review mode', () => {
- const Component = Vue.extend(IdeReview);
- let vm;
+ let wrapper;
let store;
beforeEach(() => {
@@ -21,15 +26,53 @@ describe('IDE review mode', () => {
loading: false,
});
- vm = createComponentWithStore(Component, store).$mount();
+ wrapper = mount(keepAlive(IdeReview), {
+ store,
+ localVue,
+ });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders list of files', () => {
- expect(vm.$el.textContent).toContain('fileName');
+ expect(wrapper.text()).toContain('fileName');
+ });
+
+ describe('activated', () => {
+    let initializeSpy;
+
+ beforeEach(async () => {
+      initializeSpy = jest.spyOn(wrapper.find(IdeReview).vm, 'initialize');
+ store.state.viewer = 'editor';
+
+ await wrapper.vm.reactivate();
+ });
+
+    it('re-initializes the component', () => {
+      expect(initializeSpy).toHaveBeenCalled();
+ });
+
+ it('updates viewer to "diff" by default', () => {
+ expect(store.state.viewer).toBe('diff');
+ });
+
+ describe('merge request is defined', () => {
+ beforeEach(async () => {
+ store.state.currentMergeRequestId = '1';
+ store.state.projects.abcproject.mergeRequests['1'] = {
+ iid: 123,
+ web_url: 'testing123',
+ };
+
+ await wrapper.vm.reactivate();
+ });
+
+ it('updates viewer to "mrdiff"', async () => {
+ expect(store.state.viewer).toBe('mrdiff');
+ });
+ });
});
describe('merge request', () => {
@@ -40,32 +83,27 @@ describe('IDE review mode', () => {
web_url: 'testing123',
};
- return vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
it('renders edit dropdown', () => {
- expect(vm.$el.querySelector('.btn')).not.toBe(null);
+ expect(wrapper.find(EditorModeDropdown).exists()).toBe(true);
});
- it('renders merge request link & IID', () => {
+ it('renders merge request link & IID', async () => {
store.state.viewer = 'mrdiff';
- return vm.$nextTick(() => {
- const link = vm.$el.querySelector('.ide-review-sub-header');
+ await wrapper.vm.$nextTick();
- expect(link.querySelector('a').getAttribute('href')).toBe('testing123');
- expect(trimText(link.textContent)).toBe('Merge request (!123)');
- });
+ expect(trimText(wrapper.text())).toContain('Merge request (!123)');
});
- it('changes text to latest changes when viewer is not mrdiff', () => {
+ it('changes text to latest changes when viewer is not mrdiff', async () => {
store.state.viewer = 'diff';
- return vm.$nextTick(() => {
- expect(trimText(vm.$el.querySelector('.ide-review-sub-header').textContent)).toBe(
- 'Latest changes',
- );
- });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.text()).toContain('Latest changes');
});
});
});
diff --git a/spec/frontend/ide/components/ide_side_bar_spec.js b/spec/frontend/ide/components/ide_side_bar_spec.js
index 67257b40879..86e4e8d8f89 100644
--- a/spec/frontend/ide/components/ide_side_bar_spec.js
+++ b/spec/frontend/ide/components/ide_side_bar_spec.js
@@ -1,57 +1,88 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlSkeletonLoading } from '@gitlab/ui';
import { createStore } from '~/ide/stores';
-import ideSidebar from '~/ide/components/ide_side_bar.vue';
+import IdeSidebar from '~/ide/components/ide_side_bar.vue';
+import IdeTree from '~/ide/components/ide_tree.vue';
+import RepoCommitSection from '~/ide/components/repo_commit_section.vue';
import { leftSidebarViews } from '~/ide/constants';
import { projectData } from '../mock_data';
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
describe('IdeSidebar', () => {
- let vm;
+ let wrapper;
let store;
- beforeEach(() => {
+ function createComponent() {
store = createStore();
- const Component = Vue.extend(ideSidebar);
-
store.state.currentProjectId = 'abcproject';
store.state.projects.abcproject = projectData;
- vm = createComponentWithStore(Component, store).$mount();
- });
+ return mount(IdeSidebar, {
+ store,
+ localVue,
+ });
+ }
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ wrapper = null;
});
it('renders a sidebar', () => {
- expect(vm.$el.querySelector('.multi-file-commit-panel-inner')).not.toBeNull();
+ wrapper = createComponent();
+
+ expect(wrapper.find('[data-testid="ide-side-bar-inner"]').exists()).toBe(true);
});
- it('renders loading icon component', done => {
- vm.$store.state.loading = true;
+ it('renders loading components', async () => {
+ wrapper = createComponent();
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.multi-file-loading-container')).not.toBeNull();
- expect(vm.$el.querySelectorAll('.multi-file-loading-container').length).toBe(3);
+ store.state.loading = true;
- done();
- });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.findAll(GlSkeletonLoading)).toHaveLength(3);
});
describe('activityBarComponent', () => {
it('renders tree component', () => {
- expect(vm.$el.querySelector('.ide-file-list')).not.toBeNull();
+ wrapper = createComponent();
+
+ expect(wrapper.find(IdeTree).exists()).toBe(true);
});
- it('renders commit component', done => {
- vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
+ it('renders commit component', async () => {
+ wrapper = createComponent();
+
+ store.state.currentActivityView = leftSidebarViews.commit.name;
- vm.$nextTick(() => {
- expect(vm.$el.querySelector('.multi-file-commit-panel-section')).not.toBeNull();
+ await wrapper.vm.$nextTick();
- done();
- });
+ expect(wrapper.find(RepoCommitSection).exists()).toBe(true);
});
});
+
+ it('keeps the current activity view components alive', async () => {
+ wrapper = createComponent();
+
+ const ideTreeComponent = wrapper.find(IdeTree).element;
+
+ store.state.currentActivityView = leftSidebarViews.commit.name;
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(IdeTree).exists()).toBe(false);
+ expect(wrapper.find(RepoCommitSection).exists()).toBe(true);
+
+ store.state.currentActivityView = leftSidebarViews.edit.name;
+
+ await wrapper.vm.$nextTick();
+
+    // the element reference is unchanged, meaning the component was kept alive rather than re-created
+ expect(wrapper.find(IdeTree).element).toEqual(ideTreeComponent);
+ });
});
diff --git a/spec/frontend/ide/components/ide_tree_list_spec.js b/spec/frontend/ide/components/ide_tree_list_spec.js
index 4593ef6049b..dd57a5c5f4d 100644
--- a/spec/frontend/ide/components/ide_tree_list_spec.js
+++ b/spec/frontend/ide/components/ide_tree_list_spec.js
@@ -38,15 +38,9 @@ describe('IDE tree list', () => {
beforeEach(() => {
bootstrapWithTree();
- jest.spyOn(vm, 'updateViewer');
-
vm.$mount();
});
- it('updates viewer on mount', () => {
- expect(vm.updateViewer).toHaveBeenCalledWith('edit');
- });
-
it('renders loading indicator', done => {
store.state.trees['abcproject/master'].loading = true;
@@ -67,8 +61,6 @@ describe('IDE tree list', () => {
beforeEach(() => {
bootstrapWithTree(emptyBranchTree);
- jest.spyOn(vm, 'updateViewer');
-
vm.$mount();
});
diff --git a/spec/frontend/ide/components/ide_tree_spec.js b/spec/frontend/ide/components/ide_tree_spec.js
index 899daa0bf57..ad00dec2e48 100644
--- a/spec/frontend/ide/components/ide_tree_spec.js
+++ b/spec/frontend/ide/components/ide_tree_spec.js
@@ -1,19 +1,22 @@
import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
import IdeTree from '~/ide/components/ide_tree.vue';
import { createStore } from '~/ide/stores';
-import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
+import { keepAlive } from '../../helpers/keep_alive_component_helper';
import { file } from '../helpers';
import { projectData } from '../mock_data';
-describe('IdeRepoTree', () => {
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('IdeTree', () => {
let store;
- let vm;
+ let wrapper;
beforeEach(() => {
store = createStore();
- const IdeRepoTree = Vue.extend(IdeTree);
-
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
store.state.projects.abcproject = { ...projectData };
@@ -22,14 +25,36 @@ describe('IdeRepoTree', () => {
loading: false,
});
- vm = createComponentWithStore(IdeRepoTree, store).$mount();
+ wrapper = mount(keepAlive(IdeTree), {
+ store,
+ localVue,
+ });
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders list of files', () => {
- expect(vm.$el.textContent).toContain('fileName');
+ expect(wrapper.text()).toContain('fileName');
+ });
+
+ describe('activated', () => {
+    let initializeSpy;
+
+ beforeEach(async () => {
+      initializeSpy = jest.spyOn(wrapper.find(IdeTree).vm, 'initialize');
+ store.state.viewer = 'diff';
+
+ await wrapper.vm.reactivate();
+ });
+
+    it('re-initializes the component', () => {
+      expect(initializeSpy).toHaveBeenCalled();
+ });
+
+ it('updates viewer to "editor" by default', () => {
+ expect(store.state.viewer).toBe('editor');
+ });
});
});
diff --git a/spec/frontend/ide/components/repo_commit_section_spec.js b/spec/frontend/ide/components/repo_commit_section_spec.js
index 3b837622720..096079308cd 100644
--- a/spec/frontend/ide/components/repo_commit_section_spec.js
+++ b/spec/frontend/ide/components/repo_commit_section_spec.js
@@ -1,6 +1,7 @@
import { mount } from '@vue/test-utils';
import { createStore } from '~/ide/stores';
import { createRouter } from '~/ide/ide_router';
+import { keepAlive } from '../../helpers/keep_alive_component_helper';
import RepoCommitSection from '~/ide/components/repo_commit_section.vue';
import EmptyState from '~/ide/components/commit_sidebar/empty_state.vue';
import { stageKeys } from '~/ide/constants';
@@ -14,7 +15,7 @@ describe('RepoCommitSection', () => {
let store;
function createComponent() {
- wrapper = mount(RepoCommitSection, { store });
+ wrapper = mount(keepAlive(RepoCommitSection), { store });
}
function setupDefaultState() {
@@ -64,6 +65,7 @@ describe('RepoCommitSection', () => {
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
describe('empty state', () => {
@@ -168,4 +170,21 @@ describe('RepoCommitSection', () => {
expect(wrapper.find(EmptyState).exists()).toBe(false);
});
});
+
+ describe('activated', () => {
+    let initializeSpy;
+
+ beforeEach(async () => {
+ createComponent();
+
+      initializeSpy = jest.spyOn(wrapper.find(RepoCommitSection).vm, 'initialize');
+ store.state.viewer = 'diff';
+
+ await wrapper.vm.reactivate();
+ });
+
+    it('re-initializes the component', () => {
+      expect(initializeSpy).toHaveBeenCalled();
+ });
+ });
});
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
index 307806e0a8a..1b98d488854 100644
--- a/spec/frontend/incidents/components/incidents_list_spec.js
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -5,7 +5,6 @@ import {
GlTable,
GlAvatar,
GlPagination,
- GlSearchBoxByType,
GlTab,
GlTabs,
GlBadge,
@@ -15,13 +14,24 @@ import { visitUrl, joinPaths, mergeUrlParams } from '~/lib/utils/url_utility';
import IncidentsList from '~/incidents/components/incidents_list.vue';
import SeverityToken from '~/sidebar/components/severity/severity.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import { I18N, INCIDENT_STATUS_TABS } from '~/incidents/constants';
+import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
+import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
+import {
+ I18N,
+ INCIDENT_STATUS_TABS,
+ TH_CREATED_AT_TEST_ID,
+ TH_SEVERITY_TEST_ID,
+ TH_PUBLISHED_TEST_ID,
+} from '~/incidents/constants';
import mockIncidents from '../mocks/incidents.json';
+import mockFilters from '../mocks/incidents_filter.json';
jest.mock('~/lib/utils/url_utility', () => ({
visitUrl: jest.fn().mockName('visitUrlMock'),
- joinPaths: jest.fn().mockName('joinPaths'),
- mergeUrlParams: jest.fn().mockName('mergeUrlParams'),
+ joinPaths: jest.fn(),
+ mergeUrlParams: jest.fn(),
+ setUrlParams: jest.fn(),
+ updateHistory: jest.fn(),
}));
describe('Incidents List', () => {
@@ -41,9 +51,7 @@ describe('Incidents List', () => {
const findAlert = () => wrapper.find(GlAlert);
const findLoader = () => wrapper.find(GlLoadingIcon);
const findTimeAgo = () => wrapper.findAll(TimeAgoTooltip);
- const findDateColumnHeader = () =>
- wrapper.find('[data-testid="incident-management-created-at-sort"]');
- const findSearch = () => wrapper.find(GlSearchBoxByType);
+ const findSearch = () => wrapper.find(FilteredSearchBar);
const findAssingees = () => wrapper.findAll('[data-testid="incident-assignees"]');
const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
const findClosedIcon = () => wrapper.findAll("[data-testid='incident-closed']");
@@ -73,9 +81,13 @@ describe('Incidents List', () => {
newIssuePath,
incidentTemplateName,
incidentType,
- issuePath: '/project/isssues',
+ issuePath: '/project/issues',
publishedAvailable: true,
emptyListSvgPath,
+ textQuery: '',
+ authorUsernamesQuery: '',
+ assigneeUsernamesQuery: '',
+ issuesIncidentDetails: false,
},
stubs: {
GlButton: true,
@@ -171,13 +183,6 @@ describe('Incidents List', () => {
expect(src).toBe(avatarUrl);
});
- it('contains a link to the issue details', () => {
- findTableRows()
- .at(0)
- .trigger('click');
- expect(visitUrl).toHaveBeenCalledWith(joinPaths(`/project/isssues/`, mockIncidents[0].iid));
- });
-
it('renders a closed icon for closed incidents', () => {
expect(findClosedIcon().length).toBe(
mockIncidents.filter(({ state }) => state === 'closed').length,
@@ -188,6 +193,30 @@ describe('Incidents List', () => {
it('renders severity per row', () => {
expect(findSeverity().length).toBe(mockIncidents.length);
});
+
+ it('contains a link to the issue details page', () => {
+ findTableRows()
+ .at(0)
+ .trigger('click');
+ expect(visitUrl).toHaveBeenCalledWith(joinPaths(`/project/issues/`, mockIncidents[0].iid));
+ });
+
+  it('contains a link to the incident details page', () => {
+    mountComponent({
+      data: { incidents: { list: mockIncidents }, incidentsCount: {} },
+      loading: false,
+      provide: { glFeatures: { issuesIncidentDetails: true } },
+    });
+
+    findTableRows()
+      .at(0)
+      .trigger('click');
+    expect(visitUrl).toHaveBeenCalledWith(
+      joinPaths(`/project/issues/incident`, mockIncidents[0].iid),
+    );
+  });
});
describe('Create Incident', () => {
@@ -207,11 +236,10 @@ describe('Incidents List', () => {
);
});
- it('sets button loading on click', () => {
+ it('sets button loading on click', async () => {
findCreateIncidentBtn().vm.$emit('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(findCreateIncidentBtn().attributes('loading')).toBe('true');
- });
+ await wrapper.vm.$nextTick();
+ expect(findCreateIncidentBtn().attributes('loading')).toBe('true');
});
it("doesn't show the button when list is empty", () => {
@@ -243,51 +271,47 @@ describe('Incidents List', () => {
});
describe('prevPage', () => {
- it('returns prevPage button', () => {
+ it('returns prevPage button', async () => {
findPagination().vm.$emit('input', 3);
- return wrapper.vm.$nextTick(() => {
- expect(
- findPagination()
- .findAll('.page-item')
- .at(0)
- .text(),
- ).toBe('Prev');
- });
+ await wrapper.vm.$nextTick();
+ expect(
+ findPagination()
+ .findAll('.page-item')
+ .at(0)
+ .text(),
+ ).toBe('Prev');
});
- it('returns prevPage number', () => {
+ it('returns prevPage number', async () => {
findPagination().vm.$emit('input', 3);
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.prevPage).toBe(2);
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.prevPage).toBe(2);
});
- it('returns 0 when it is the first page', () => {
+ it('returns 0 when it is the first page', async () => {
findPagination().vm.$emit('input', 1);
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.prevPage).toBe(0);
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.prevPage).toBe(0);
});
});
describe('nextPage', () => {
- it('returns nextPage button', () => {
+ it('returns nextPage button', async () => {
findPagination().vm.$emit('input', 3);
- return wrapper.vm.$nextTick(() => {
- expect(
- findPagination()
- .findAll('.page-item')
- .at(1)
- .text(),
- ).toBe('Next');
- });
+ await wrapper.vm.$nextTick();
+ expect(
+ findPagination()
+ .findAll('.page-item')
+ .at(1)
+ .text(),
+ ).toBe('Next');
});
- it('returns nextPage number', () => {
+ it('returns nextPage number', async () => {
mountComponent({
data: {
incidents: {
@@ -301,21 +325,19 @@ describe('Incidents List', () => {
});
findPagination().vm.$emit('input', 1);
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.nextPage).toBe(2);
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.nextPage).toBe(2);
});
- it('returns `null` when currentPage is already last page', () => {
+ it('returns `null` when currentPage is already last page', async () => {
findStatusTabs().vm.$emit('input', 1);
findPagination().vm.$emit('input', 1);
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.vm.nextPage).toBeNull();
- });
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.nextPage).toBeNull();
});
});
- describe('Search', () => {
+ describe('Filtered search component', () => {
beforeEach(() => {
mountComponent({
data: {
@@ -331,15 +353,62 @@ describe('Incidents List', () => {
});
it('renders the search component for incidents', () => {
- expect(findSearch().exists()).toBe(true);
+ expect(findSearch().props('searchInputPlaceholder')).toBe('Search or filter results…');
+ expect(findSearch().props('tokens')).toEqual([
+ {
+ type: 'author_username',
+ icon: 'user',
+ title: 'Author',
+ unique: true,
+ symbol: '@',
+ token: AuthorToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchPath: '/project/path',
+ fetchAuthors: expect.any(Function),
+ },
+ {
+ type: 'assignee_username',
+ icon: 'user',
+ title: 'Assignees',
+ unique: true,
+ symbol: '@',
+ token: AuthorToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchPath: '/project/path',
+ fetchAuthors: expect.any(Function),
+ },
+ ]);
+ expect(findSearch().props('recentSearchesStorageKey')).toBe('incidents');
+ });
+
+ it('returns correctly applied filter search values', async () => {
+ const searchTerm = 'foo';
+ wrapper.setData({
+ searchTerm,
+ });
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.filteredSearchValue).toEqual([searchTerm]);
});
- it('sets the `searchTerm` graphql variable', () => {
- const SEARCH_TERM = 'Simple Incident';
+ it('updates props tied to getIncidents GraphQL query', () => {
+ wrapper.vm.handleFilterIncidents(mockFilters);
- findSearch().vm.$emit('input', SEARCH_TERM);
+ expect(wrapper.vm.authorUsername).toBe('root');
+ expect(wrapper.vm.assigneeUsernames).toEqual('root2');
+ expect(wrapper.vm.searchTerm).toBe(mockFilters[2].value.data);
+ });
- expect(wrapper.vm.$data.searchTerm).toBe(SEARCH_TERM);
+ it('updates props `searchTerm` and `authorUsername` with empty values when passed filters param is empty', () => {
+ wrapper.setData({
+ authorUsername: 'foo',
+ searchTerm: 'bar',
+ });
+
+ wrapper.vm.handleFilterIncidents([]);
+
+ expect(wrapper.vm.authorUsername).toBe('');
+ expect(wrapper.vm.searchTerm).toBe('');
});
});
@@ -383,13 +452,25 @@ describe('Incidents List', () => {
});
});
- it('updates sort with new direction and column key', () => {
- expect(findDateColumnHeader().attributes('aria-sort')).toBe('descending');
+ const descSort = 'descending';
+ const ascSort = 'ascending';
+ const noneSort = 'none';
- findDateColumnHeader().trigger('click');
- return wrapper.vm.$nextTick(() => {
- expect(findDateColumnHeader().attributes('aria-sort')).toBe('ascending');
- });
+ it.each`
+ selector | initialSort | firstSort | nextSort
+ ${TH_CREATED_AT_TEST_ID} | ${descSort} | ${ascSort} | ${descSort}
+ ${TH_SEVERITY_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
+ ${TH_PUBLISHED_TEST_ID} | ${noneSort} | ${descSort} | ${ascSort}
+ `('updates sort with new direction', async ({ selector, initialSort, firstSort, nextSort }) => {
+ const [[attr, value]] = Object.entries(selector);
+ const columnHeader = () => wrapper.find(`[${attr}="${value}"]`);
+ expect(columnHeader().attributes('aria-sort')).toBe(initialSort);
+ columnHeader().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(columnHeader().attributes('aria-sort')).toBe(firstSort);
+ columnHeader().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(columnHeader().attributes('aria-sort')).toBe(nextSort);
});
});
});
diff --git a/spec/frontend/incidents/mocks/incidents_filter.json b/spec/frontend/incidents/mocks/incidents_filter.json
new file mode 100644
index 00000000000..9f54e259b1d
--- /dev/null
+++ b/spec/frontend/incidents/mocks/incidents_filter.json
@@ -0,0 +1,14 @@
+ [
+ {
+ "type": "assignee_username",
+ "value": { "data": "root2" }
+ },
+ {
+ "type": "author_username",
+ "value": { "data": "root" }
+ },
+ {
+ "type": "filtered-search-term",
+ "value": { "data": "bar" }
+ }
+ ]
\ No newline at end of file
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
index cab2165b5db..e4620590e62 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -93,23 +93,20 @@ exports[`Alert integration settings form default state should match the default
</gl-form-checkbox-stub>
</gl-form-group-stub>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-no-auto-disable"
+ data-qa-selector="save_changes_button"
+ icon=""
+ size="medium"
+ type="submit"
+ variant="success"
>
- <gl-button-stub
- category="primary"
- class="js-no-auto-disable"
- data-qa-selector="save_changes_button"
- icon=""
- size="medium"
- type="submit"
- variant="success"
- >
-
- Save changes
- </gl-button-stub>
- </div>
+ Save changes
+
+ </gl-button-stub>
</form>
</div>
`;
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
index 3ad4c13382d..53c3e131466 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
@@ -18,6 +18,7 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
</h4>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-settings-toggle"
icon=""
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index 78bb238fcb6..ea2c512bf40 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -42,24 +42,21 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
/>
</div>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-mt-3"
+ data-testid="webhook-reset-btn"
+ icon=""
+ role="button"
+ size="medium"
+ tabindex="0"
+ variant="default"
>
- <gl-button-stub
- category="primary"
- class="gl-mt-3"
- data-testid="webhook-reset-btn"
- icon=""
- role="button"
- size="medium"
- tabindex="0"
- variant="default"
- >
-
- Reset webhook URL
- </gl-button-stub>
- </div>
+ Reset webhook URL
+
+ </gl-button-stub>
<gl-modal-stub
modalclass=""
@@ -76,22 +73,19 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
</gl-modal-stub>
</gl-form-group-stub>
- <div
- class="gl-display-flex gl-justify-content-end"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="js-no-auto-disable"
+ icon=""
+ size="medium"
+ type="submit"
+ variant="success"
>
- <gl-button-stub
- category="primary"
- class="js-no-auto-disable"
- icon=""
- size="medium"
- type="submit"
- variant="success"
- >
-
- Save changes
- </gl-button-stub>
- </div>
+ Save changes
+
+ </gl-button-stub>
</form>
</div>
`;
diff --git a/spec/frontend/integrations/edit/components/confirmation_modal_spec.js b/spec/frontend/integrations/edit/components/confirmation_modal_spec.js
new file mode 100644
index 00000000000..02f311f579f
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/confirmation_modal_spec.js
@@ -0,0 +1,51 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import { createStore } from '~/integrations/edit/store';
+
+import ConfirmationModal from '~/integrations/edit/components/confirmation_modal.vue';
+
+describe('ConfirmationModal', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(ConfirmationModal, {
+ store: createStore(),
+ });
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ const findGlModal = () => wrapper.find(GlModal);
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders GlModal with correct copy', () => {
+ expect(findGlModal().exists()).toBe(true);
+ expect(findGlModal().attributes('title')).toBe('Save settings?');
+ expect(findGlModal().text()).toContain(
+ 'Saving will update the default settings for all projects that are not using custom settings.',
+ );
+ expect(findGlModal().text()).toContain(
+ 'Projects using custom settings will not be impacted unless the project owner chooses to use instance-level defaults.',
+ );
+ });
+
+ it('emits `submit` event when `primary` event is emitted on GlModal', async () => {
+ expect(wrapper.emitted().submit).toBeUndefined();
+
+ findGlModal().vm.$emit('primary');
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted().submit).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index eeb5d21d62c..efcc727277a 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -4,6 +4,7 @@ import { createStore } from '~/integrations/edit/store';
import IntegrationForm from '~/integrations/edit/components/integration_form.vue';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
import ActiveCheckbox from '~/integrations/edit/components/active_checkbox.vue';
+import ConfirmationModal from '~/integrations/edit/components/confirmation_modal.vue';
import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_fields.vue';
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
@@ -22,6 +23,7 @@ describe('IntegrationForm', () => {
stubs: {
OverrideDropdown,
ActiveCheckbox,
+ ConfirmationModal,
JiraTriggerFields,
TriggerFields,
},
@@ -40,6 +42,7 @@ describe('IntegrationForm', () => {
const findOverrideDropdown = () => wrapper.find(OverrideDropdown);
const findActiveCheckbox = () => wrapper.find(ActiveCheckbox);
+ const findConfirmationModal = () => wrapper.find(ConfirmationModal);
const findJiraTriggerFields = () => wrapper.find(JiraTriggerFields);
const findJiraIssuesFields = () => wrapper.find(JiraIssuesFields);
const findTriggerFields = () => wrapper.find(TriggerFields);
@@ -63,6 +66,26 @@ describe('IntegrationForm', () => {
});
});
+ describe('integrationLevel is instance', () => {
+ it('renders ConfirmationModal', () => {
+ createComponent({
+ integrationLevel: 'instance',
+ });
+
+ expect(findConfirmationModal().exists()).toBe(true);
+ });
+ });
+
+ describe('integrationLevel is not instance', () => {
+ it('does not render ConfirmationModal', () => {
+ createComponent({
+ integrationLevel: 'project',
+ });
+
+ expect(findConfirmationModal().exists()).toBe(false);
+ });
+ });
+
describe('type is "slack"', () => {
beforeEach(() => {
createComponent({ type: 'slack' });
diff --git a/spec/frontend/integrations/edit/mock_data.js b/spec/frontend/integrations/edit/mock_data.js
index 821972b7698..27ba0768331 100644
--- a/spec/frontend/integrations/edit/mock_data.js
+++ b/spec/frontend/integrations/edit/mock_data.js
@@ -2,6 +2,7 @@ export const mockIntegrationProps = {
id: 25,
initialActivated: true,
showActive: true,
+ editable: true,
triggerFieldsProps: {
initialTriggerCommit: false,
initialTriggerMergeRequest: false,
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
new file mode 100644
index 00000000000..0be0fbbde2d
--- /dev/null
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -0,0 +1,115 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlDropdown, GlDropdownItem, GlDatepicker, GlSprintf, GlLink } from '@gitlab/ui';
+import Api from '~/api';
+import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue';
+
+const groupId = '1';
+const groupName = 'testgroup';
+const accessLevels = { Guest: 10, Reporter: 20, Developer: 30, Maintainer: 40, Owner: 50 };
+const defaultAccessLevel = '10';
+const helpLink = 'https://example.com';
+
+const createComponent = () => {
+ return shallowMount(InviteMembersModal, {
+ propsData: {
+ groupId,
+ groupName,
+ accessLevels,
+ defaultAccessLevel,
+ helpLink,
+ },
+ stubs: {
+ GlSprintf,
+ 'gl-modal': '<div><slot name="modal-footer"></slot><slot></slot></div>',
+ },
+ });
+};
+
+describe('InviteMembersModal', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDatepicker = () => wrapper.find(GlDatepicker);
+ const findLink = () => wrapper.find(GlLink);
+ const findCancelButton = () => wrapper.find({ ref: 'cancelButton' });
+ const findInviteButton = () => wrapper.find({ ref: 'inviteButton' });
+
+ describe('rendering the modal', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('renders the modal with the correct title', () => {
+ expect(wrapper.attributes('title')).toBe('Invite team members');
+ });
+
+ it('renders the Cancel button text correctly', () => {
+ expect(findCancelButton().text()).toBe('Cancel');
+ });
+
+ it('renders the Invite button text correctly', () => {
+ expect(findInviteButton().text()).toBe('Invite');
+ });
+
+ describe('rendering the access levels dropdown', () => {
+ it('sets the default dropdown text to the default access level name', () => {
+ expect(findDropdown().attributes('text')).toBe('Guest');
+ });
+
+ it('renders dropdown items for each accessLevel', () => {
+ expect(findDropdownItems()).toHaveLength(5);
+ });
+ });
+
+ describe('rendering the help link', () => {
+ it('renders the correct link', () => {
+ expect(findLink().attributes('href')).toBe(helpLink);
+ });
+ });
+
+ describe('rendering the access expiration date field', () => {
+ it('renders the datepicker', () => {
+ expect(findDatepicker()).toExist();
+ });
+ });
+ });
+
+ describe('submitting the invite form', () => {
+ const postData = {
+ user_id: '1',
+ access_level: '10',
+ expires_at: new Date(),
+ format: 'json',
+ };
+
+ beforeEach(() => {
+ wrapper = createComponent();
+
+ jest.spyOn(Api, 'inviteGroupMember').mockResolvedValue({ data: postData });
+ wrapper.vm.$toast = { show: jest.fn() };
+
+ wrapper.vm.submitForm(postData);
+ });
+
+ it('calls Api inviteGroupMember with the correct params', () => {
+ expect(Api.inviteGroupMember).toHaveBeenCalledWith(groupId, postData);
+ });
+
+ describe('when the invite was sent successfully', () => {
+ const toastMessageSuccessful = 'Users were succesfully added';
+
+ it('displays the successful toastMessage', () => {
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(
+ toastMessageSuccessful,
+ wrapper.vm.toastOptions,
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
new file mode 100644
index 00000000000..450d37a9748
--- /dev/null
+++ b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
@@ -0,0 +1,58 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon, GlLink } from '@gitlab/ui';
+import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
+
+const displayText = 'Invite team members';
+const icon = 'plus';
+
+const createComponent = (props = {}) => {
+ return shallowMount(InviteMembersTrigger, {
+ propsData: {
+ displayText,
+ ...props,
+ },
+ });
+};
+
+describe('InviteMembersTrigger', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('displayText', () => {
+ const findLink = () => wrapper.find(GlLink);
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('includes the correct displayText for the link', () => {
+ expect(findLink().text()).toBe(displayText);
+ });
+ });
+
+ describe('icon', () => {
+ const findIcon = () => wrapper.find(GlIcon);
+
+ it('includes the correct icon when an icon is sent', () => {
+ wrapper = createComponent({ icon });
+
+ expect(findIcon().attributes('name')).toBe(icon);
+ });
+
+ it('does not include an icon when icon is not sent', () => {
+ wrapper = createComponent();
+
+ expect(findIcon().exists()).toBe(false);
+ });
+
+ it('does not include an icon when empty string is sent', () => {
+ wrapper = createComponent({ icon: '' });
+
+ expect(findIcon().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
index bfbe4ec8e70..17a195df494 100644
--- a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
+++ b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
@@ -48,7 +48,10 @@ describe('AddIssuableForm', () => {
const input = findFormInput(wrapper);
if (input) input.blur();
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('with data', () => {
diff --git a/spec/frontend/issuable/related_issues/components/issue_token_spec.js b/spec/frontend/issuable/related_issues/components/issue_token_spec.js
index 553721fa783..f2cb9042ba6 100644
--- a/spec/frontend/issuable/related_issues/components/issue_token_spec.js
+++ b/spec/frontend/issuable/related_issues/components/issue_token_spec.js
@@ -1,241 +1,146 @@
-import Vue from 'vue';
+import { shallowMount } from '@vue/test-utils';
import { PathIdSeparator } from '~/related_issues/constants';
-import issueToken from '~/related_issues/components/issue_token.vue';
+import IssueToken from '~/related_issues/components/issue_token.vue';
describe('IssueToken', () => {
const idKey = 200;
const displayReference = 'foo/bar#123';
- const title = 'some title';
- const pathIdSeparator = PathIdSeparator.Issue;
const eventNamespace = 'pendingIssuable';
- let IssueToken;
- let vm;
+ const path = '/foo/bar/issues/123';
+ const pathIdSeparator = PathIdSeparator.Issue;
+ const title = 'some title';
- beforeEach(() => {
- IssueToken = Vue.extend(issueToken);
- });
+ let wrapper;
+
+ const defaultProps = {
+ idKey,
+ displayReference,
+ pathIdSeparator,
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(IssueToken, {
+ propsData: { ...defaultProps, ...props },
+ });
+ };
afterEach(() => {
- if (vm) {
- vm.$destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
}
});
+ const findLink = () => wrapper.find({ ref: 'link' });
+ const findReference = () => wrapper.find({ ref: 'reference' });
+ const findReferenceIcon = () => wrapper.find('[data-testid="referenceIcon"]');
+ const findRemoveBtn = () => wrapper.find('[data-testid="removeBtn"]');
+ const findTitle = () => wrapper.find({ ref: 'title' });
+
describe('with reference supplied', () => {
beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- },
- }).$mount();
+ createComponent();
});
it('shows reference', () => {
- expect(vm.$el.textContent.trim()).toEqual(displayReference);
+ expect(wrapper.text()).toContain(displayReference);
});
it('does not link without path specified', () => {
- expect(vm.$refs.link.tagName.toLowerCase()).toEqual('span');
- expect(vm.$refs.link.getAttribute('href')).toBeNull();
+ expect(findLink().element.tagName).toBe('SPAN');
+ expect(findLink().attributes('href')).toBeUndefined();
});
});
describe('with reference and title supplied', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- title,
- },
- }).$mount();
- });
-
it('shows reference and title', () => {
- expect(vm.$refs.reference.textContent.trim()).toEqual(displayReference);
- expect(vm.$refs.title.textContent.trim()).toEqual(title);
- });
- });
-
- describe('with path supplied', () => {
- const path = '/foo/bar/issues/123';
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- title,
- path,
- },
- }).$mount();
- });
+ createComponent({
+ title,
+ });
- it('links reference and title', () => {
- expect(vm.$refs.link.getAttribute('href')).toEqual(path);
+ expect(findReference().text()).toBe(displayReference);
+ expect(findTitle().text()).toBe(title);
});
});
- describe('with state supplied', () => {
- describe("`state: 'opened'`", () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- state: 'opened',
- },
- }).$mount();
+ describe('with path and title supplied', () => {
+ it('links reference and title', () => {
+ createComponent({
+ path,
+ title,
});
- it('shows green circle icon', () => {
- expect(vm.$el.querySelector('.issue-token-state-icon-open.fa.fa-circle-o')).toBeDefined();
- });
- });
-
- describe("`state: 'reopened'`", () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- state: 'reopened',
- },
- }).$mount();
- });
-
- it('shows green circle icon', () => {
- expect(vm.$el.querySelector('.issue-token-state-icon-open.fa.fa-circle-o')).toBeDefined();
- });
+ expect(findLink().attributes('href')).toBe(path);
});
+ });
- describe("`state: 'closed'`", () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- state: 'closed',
- },
- }).$mount();
+ describe('with state supplied', () => {
+ it.each`
+ state | icon | cssClass
+ ${'opened'} | ${'issue-open-m'} | ${'issue-token-state-icon-open'}
+ ${'reopened'} | ${'issue-open-m'} | ${'issue-token-state-icon-open'}
+ ${'closed'} | ${'issue-close'} | ${'issue-token-state-icon-closed'}
+ `('shows "$icon" icon when "$state"', ({ state, icon, cssClass }) => {
+ createComponent({
+ path,
+ state,
});
- it('shows red minus icon', () => {
- expect(vm.$el.querySelector('.issue-token-state-icon-closed.fa.fa-minus')).toBeDefined();
- });
+ expect(findReferenceIcon().props('name')).toBe(icon);
+ expect(findReferenceIcon().classes()).toContain(cssClass);
});
});
describe('with reference, title, state', () => {
const state = 'opened';
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- title,
- state,
- },
- }).$mount();
- });
it('shows reference, title, and state', () => {
- const stateIcon = vm.$refs.reference.querySelector('svg');
+ createComponent({
+ title,
+ state,
+ });
- expect(stateIcon.getAttribute('aria-label')).toEqual(state);
- expect(vm.$refs.reference.textContent.trim()).toEqual(displayReference);
- expect(vm.$refs.title.textContent.trim()).toEqual(title);
+ expect(findReferenceIcon().attributes('aria-label')).toBe(state);
+ expect(findReference().text()).toBe(displayReference);
+ expect(findTitle().text()).toBe(title);
});
});
describe('with canRemove', () => {
describe('`canRemove: false` (default)', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- },
- }).$mount();
- });
-
it('does not have remove button', () => {
- expect(vm.$el.querySelector('.issue-token-remove-button')).toBeNull();
+ createComponent();
+
+ expect(findRemoveBtn().exists()).toBe(false);
});
});
describe('`canRemove: true`', () => {
beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- canRemove: true,
- },
- }).$mount();
+ createComponent({
+ eventNamespace,
+ canRemove: true,
+ });
});
it('has remove button', () => {
- expect(vm.$el.querySelector('.issue-token-remove-button')).toBeDefined();
+ expect(findRemoveBtn().exists()).toBe(true);
});
- });
- });
-
- describe('methods', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- },
- }).$mount();
- });
- it('when getting checked', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.onRemoveRequest();
+ it('emits event when clicked', () => {
+ findRemoveBtn().trigger('click');
- expect(vm.$emit).toHaveBeenCalledWith('pendingIssuableRemoveRequest', vm.idKey);
- });
- });
+ const emitted = wrapper.emitted(`${eventNamespace}RemoveRequest`);
- describe('tooltip', () => {
- beforeEach(() => {
- vm = new IssueToken({
- propsData: {
- idKey,
- eventNamespace,
- displayReference,
- pathIdSeparator,
- canRemove: true,
- },
- }).$mount();
- });
-
- it('should not be escaped', () => {
- const { originalTitle } = vm.$refs.removeButton.dataset;
+ expect(emitted).toHaveLength(1);
+ expect(emitted[0]).toEqual([idKey]);
+ });
- expect(originalTitle).toEqual(`Remove ${displayReference}`);
+ it('tooltip should not be escaped', () => {
+ expect(findRemoveBtn().attributes('data-original-title')).toBe(
+ `Remove ${displayReference}`,
+ );
+ });
});
});
});
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
index 0f88e4d71fe..b758b85beef 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
@@ -18,7 +18,10 @@ describe('RelatedIssuesBlock', () => {
const findIssueCountBadgeAddButton = () => wrapper.find(GlButton);
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('with defaults', () => {
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
index 6cf0b9d21ea..39bc244297b 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_list_spec.js
@@ -14,7 +14,10 @@ describe('RelatedIssuesList', () => {
let wrapper;
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('with defaults', () => {
diff --git a/spec/frontend/issuable_create/components/issuable_form_spec.js b/spec/frontend/issuable_create/components/issuable_form_spec.js
index e2c6b4d9521..e489d1dae3e 100644
--- a/spec/frontend/issuable_create/components/issuable_form_spec.js
+++ b/spec/frontend/issuable_create/components/issuable_form_spec.js
@@ -79,6 +79,7 @@ describe('IssuableForm', () => {
markdownDocsPath: wrapper.vm.descriptionHelpPath,
addSpacingClasses: false,
showSuggestPopover: true,
+ textareaValue: '',
});
expect(descriptionFieldEl.find('textarea').exists()).toBe(true);
expect(descriptionFieldEl.find('textarea').attributes('placeholder')).toBe(
diff --git a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js b/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
index 8d50df5e406..766a27015bb 100644
--- a/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
+++ b/spec/frontend/issue_show/components/incidents/highlight_bar_spec.js
@@ -9,6 +9,7 @@ describe('Highlight Bar', () => {
let wrapper;
const alert = {
+ iid: 1,
startedAt: '2020-05-29T10:39:22Z',
detailsUrl: 'http://127.0.0.1:3000/root/unique-alerts/-/alert_management/1/details',
eventCount: 1,
@@ -39,7 +40,8 @@ describe('Highlight Bar', () => {
it('renders a link to the alert page', () => {
expect(findLink().exists()).toBe(true);
expect(findLink().attributes('href')).toBe(alert.detailsUrl);
- expect(findLink().text()).toContain(alert.title);
+ expect(findLink().attributes('title')).toBe(alert.title);
+ expect(findLink().text()).toBe(`#${alert.iid}`);
});
it('renders formatted start time of the alert', () => {
diff --git a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
index a51b497cd79..6babba37b57 100644
--- a/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
+++ b/spec/frontend/issue_show/components/incidents/incident_tabs_spec.js
@@ -79,7 +79,7 @@ describe('Incident Tabs component', () => {
it('renders the alert details table with the correct props', () => {
const alert = { iid: mockAlert.iid };
- expect(findAlertDetailsComponent().props('alert')).toEqual(alert);
+ expect(findAlertDetailsComponent().props('alert')).toMatchObject(alert);
expect(findAlertDetailsComponent().props('loading')).toBe(true);
});
diff --git a/spec/frontend/issue_show/issue_spec.js b/spec/frontend/issue_show/issue_spec.js
index befb670c6cd..c0175e774a2 100644
--- a/spec/frontend/issue_show/issue_spec.js
+++ b/spec/frontend/issue_show/issue_spec.js
@@ -14,12 +14,8 @@ useMockIntersectionObserver();
jest.mock('~/lib/utils/poll');
const setupHTML = initialData => {
- document.body.innerHTML = `
- <div id="js-issuable-app"></div>
- <script id="js-issuable-app-initial-data" type="application/json">
- ${JSON.stringify(initialData)}
- </script>
- `;
+ document.body.innerHTML = `<div id="js-issuable-app"></div>`;
+ document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(initialData);
};
describe('Issue show index', () => {
diff --git a/spec/frontend/jobs/store/utils_spec.js b/spec/frontend/jobs/store/utils_spec.js
index 294f88bbc74..e50d304bb08 100644
--- a/spec/frontend/jobs/store/utils_spec.js
+++ b/spec/frontend/jobs/store/utils_spec.js
@@ -35,6 +35,14 @@ describe('Jobs Store Utils', () => {
lines: [],
});
});
+
+ it('pre-closes a section when specified in options', () => {
+ const headerLine = { content: [{ text: 'foo' }], section_options: { collapsed: 'true' } };
+
+ const parsedHeaderLine = parseHeaderLine(headerLine, 2);
+
+ expect(parsedHeaderLine.isClosed).toBe(true);
+ });
});
describe('parseLine', () => {
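
The new case above covers header lines whose section_options request a pre-collapsed section. A reduced sketch of the behaviour being asserted, assuming parseHeaderLine returns an object with an isClosed flag as the surrounding tests suggest (the exact return shape of the store util is an assumption):

    // A header line starts closed when its section options carry
    // `collapsed: 'true'` (the flag arrives as a string).
    const parseHeaderLineSketch = (line, lineNumber) => ({
      isClosed: line.section_options?.collapsed === 'true',
      isHeader: true,
      line: { ...line, lineNumber },
    });
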
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
new file mode 100644
index 00000000000..ee1971a4931
--- /dev/null
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -0,0 +1,98 @@
+import { sanitize } from '~/lib/dompurify';
+
+// GDK
+const rootGon = {
+ sprite_file_icons: '/assets/icons-123a.svg',
+ sprite_icons: '/assets/icons-456b.svg',
+};
+
+// Production
+const absoluteGon = {
+ sprite_file_icons: `${window.location.protocol}//${window.location.hostname}/assets/icons-123a.svg`,
+ sprite_icons: `${window.location.protocol}//${window.location.hostname}/assets/icons-456b.svg`,
+};
+
+const expectedSanitized = '<svg><use></use></svg>';
+
+const safeUrls = {
+ root: Object.values(rootGon).map(url => `${url}#ellipsis_h`),
+ absolute: Object.values(absoluteGon).map(url => `${url}#ellipsis_h`),
+};
+
+const unsafeUrls = [
+ '/an/evil/url',
+ '../../../evil/url',
+ 'https://evil.url/assets/icons-123a.svg',
+ 'https://evil.url/assets/icons-456b.svg',
+ `https://evil.url/${rootGon.sprite_icons}`,
+ `https://evil.url/${rootGon.sprite_file_icons}`,
+ `https://evil.url/${absoluteGon.sprite_icons}`,
+ `https://evil.url/${absoluteGon.sprite_file_icons}`,
+];
+
+describe('~/lib/dompurify', () => {
+ let originalGon;
+
+ it('uses local configuration when given', () => {
+ // As dompurify uses a "Persistent Configuration", it might
+ // ignore the config passed to it; this check verifies that we respect
+ // https://github.com/cure53/DOMPurify#persistent-configuration
+ expect(sanitize('<br>', { ALLOWED_TAGS: [] })).toBe('');
+ expect(sanitize('<strong></strong>', { ALLOWED_TAGS: [] })).toBe('');
+ });
+
+ describe.each`
+ type | gon
+ ${'root'} | ${rootGon}
+ ${'absolute'} | ${absoluteGon}
+ `('when gon contains $type icon urls', ({ type, gon }) => {
+ beforeAll(() => {
+ originalGon = window.gon;
+ window.gon = gon;
+ });
+
+ afterAll(() => {
+ window.gon = originalGon;
+ });
+
+ it('allows no href attrs', () => {
+ const htmlHref = `<svg><use></use></svg>`;
+ expect(sanitize(htmlHref)).toBe(htmlHref);
+ });
+
+ it.each(safeUrls[type])('allows safe URL %s', url => {
+ const htmlHref = `<svg><use href="${url}"></use></svg>`;
+ expect(sanitize(htmlHref)).toBe(htmlHref);
+
+ const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
+ expect(sanitize(htmlXlink)).toBe(htmlXlink);
+ });
+
+ it.each(unsafeUrls)('sanitizes unsafe URL %s', url => {
+ const htmlHref = `<svg><use href="${url}"></use></svg>`;
+ const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
+
+ expect(sanitize(htmlHref)).toBe(expectedSanitized);
+ expect(sanitize(htmlXlink)).toBe(expectedSanitized);
+ });
+ });
+
+ describe('when gon does not contain icon urls', () => {
+ beforeAll(() => {
+ originalGon = window.gon;
+ window.gon = {};
+ });
+
+ afterAll(() => {
+ window.gon = originalGon;
+ });
+
+ it.each([...safeUrls.root, ...safeUrls.absolute, ...unsafeUrls])('sanitizes URL %s', url => {
+ const htmlHref = `<svg><use href="${url}"></use></svg>`;
+ const htmlXlink = `<svg><use xlink:href="${url}"></use></svg>`;
+
+ expect(sanitize(htmlHref)).toBe(expectedSanitized);
+ expect(sanitize(htmlXlink)).toBe(expectedSanitized);
+ });
+ });
+});
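
The new spec pins down an allow-list: use elements may only reference the icon sprite URLs exposed on window.gon, and any other href (path traversal, foreign hosts, lookalike paths) is stripped while the surrounding SVG survives. A condensed sketch of that rule — the actual hook wiring inside ~/lib/dompurify is not shown here and may differ:

    // True when an href points into one of the configured sprite files.
    const isSafeSpriteHref = href => {
      const allowed = [window.gon?.sprite_file_icons, window.gon?.sprite_icons].filter(Boolean);
      return allowed.some(url => href.startsWith(`${url}#`));
    };
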
diff --git a/spec/frontend/lib/utils/axios_startup_calls_spec.js b/spec/frontend/lib/utils/axios_startup_calls_spec.js
index e804cae7914..e12bf725560 100644
--- a/spec/frontend/lib/utils/axios_startup_calls_spec.js
+++ b/spec/frontend/lib/utils/axios_startup_calls_spec.js
@@ -111,21 +111,44 @@ describe('setupAxiosStartupCalls', () => {
});
});
- it('removes GitLab Base URL from startup call', async () => {
- const oldGon = window.gon;
- window.gon = { gitlab_url: 'https://example.org/gitlab' };
-
- window.gl.startup_calls = {
- '/startup': {
- fetchCall: mockFetchCall(200),
- },
- };
- setupAxiosStartupCalls(axios);
+ describe('startup call', () => {
+ let oldGon;
+
+ beforeEach(() => {
+ oldGon = window.gon;
+ window.gon = { gitlab_url: 'https://example.org/gitlab' };
+ });
+
+ afterEach(() => {
+ window.gon = oldGon;
+ });
- const { data } = await axios.get('https://example.org/gitlab/startup');
+ it('removes GitLab Base URL from startup call', async () => {
+ window.gl.startup_calls = {
+ '/startup': {
+ fetchCall: mockFetchCall(200),
+ },
+ };
+ setupAxiosStartupCalls(axios);
- expect(data).toEqual(STARTUP_JS_RESPONSE);
+ const { data } = await axios.get('https://example.org/gitlab/startup');
- window.gon = oldGon;
+ expect(data).toEqual(STARTUP_JS_RESPONSE);
+ });
+
+ it('sorts the params in the requested API url', async () => {
+ window.gl.startup_calls = {
+ '/startup?alpha=true&bravo=true': {
+ fetchCall: mockFetchCall(200),
+ },
+ };
+ setupAxiosStartupCalls(axios);
+
+ // Use a full url instead of passing options = { params: { ... } } to axios.get
+ // to ensure the params are listed in the specified order.
+ const { data } = await axios.get('https://example.org/gitlab/startup?bravo=true&alpha=true');
+
+ expect(data).toEqual(STARTUP_JS_RESPONSE);
+ });
});
});
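
The added case above documents that startup calls are matched on a canonical URL: query parameters are compared in sorted order rather than in the order the caller wrote them. A small sketch of that normalisation step (the function name is illustrative; the real logic lives in ~/lib/utils/axios_startup_calls.js):

    // '?bravo=true&alpha=true' and '?alpha=true&bravo=true' normalise to the same key.
    const sortUrlParams = url => {
      const [path, query = ''] = url.split('?');
      const sorted = query.split('&').filter(Boolean).sort().join('&');
      return sorted ? `${path}?${sorted}` : path;
    };
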
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 5b1fdea058b..a7973d66b50 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -667,3 +667,26 @@ describe('differenceInMilliseconds', () => {
expect(datetimeUtility.differenceInMilliseconds(startDate, endDate)).toBe(expected);
});
});
+
+describe('dateAtFirstDayOfMonth', () => {
+ const date = new Date('2019-07-16T12:00:00.000Z');
+
+ it('returns the date at the first day of the month', () => {
+ const startDate = datetimeUtility.dateAtFirstDayOfMonth(date);
+ const expectedStartDate = new Date('2019-07-01T12:00:00.000Z');
+
+ expect(startDate).toStrictEqual(expectedStartDate);
+ });
+});
+
+describe('datesMatch', () => {
+ const date = new Date('2019-07-17T00:00:00.000Z');
+
+ it.each`
+ date1 | date2 | expected
+ ${date} | ${new Date('2019-07-17T00:00:00.000Z')} | ${true}
+ ${date} | ${new Date('2019-07-17T12:00:00.000Z')} | ${false}
+ `('returns $expected when comparing $date1 to $date2', ({ date1, date2, expected }) => {
+ expect(datetimeUtility.datesMatch(date1, date2)).toBe(expected);
+ });
+});
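
The two helpers covered above are small date utilities: dateAtFirstDayOfMonth keeps the time of day but moves the date to the 1st, and datesMatch is an exact comparison. Sketches consistent with the expectations in the tables (not necessarily the code in datetime_utility.js; the first assumes the spec runs in UTC):

    // Same timestamp, day-of-month forced to 1.
    const dateAtFirstDayOfMonth = date => new Date(new Date(date).setDate(1));

    // Millisecond-exact equality of two Date objects.
    const datesMatch = (date1, date2) => date1.getTime() === date2.getTime();
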
diff --git a/spec/frontend/lib/utils/experimentation_spec.js b/spec/frontend/lib/utils/experimentation_spec.js
new file mode 100644
index 00000000000..2c5d2f89297
--- /dev/null
+++ b/spec/frontend/lib/utils/experimentation_spec.js
@@ -0,0 +1,20 @@
+import * as experimentUtils from '~/lib/utils/experimentation';
+
+const TEST_KEY = 'abc';
+
+describe('experiment Utilities', () => {
+ describe('isExperimentEnabled', () => {
+ it.each`
+ experiments | value
+ ${{ [TEST_KEY]: true }} | ${true}
+ ${{ [TEST_KEY]: false }} | ${false}
+ ${{ def: true }} | ${false}
+ ${{}} | ${false}
+ ${null} | ${false}
+ `('returns correct value of $value for experiments=$experiments', ({ experiments, value }) => {
+ window.gon = { experiments };
+
+ expect(experimentUtils.isExperimentEnabled(TEST_KEY)).toEqual(value);
+ });
+ });
+});
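
The table above exercises the interesting shapes of gon.experiments, including a missing key and a null value. A sketch matching those expectations (inferred from the table, not copied from ~/lib/utils/experimentation.js):

    // True only when the experiment key is explicitly enabled in gon.
    const isExperimentEnabled = experimentKey => Boolean(window.gon?.experiments?.[experimentKey]);
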
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 869ae274a3f..2afc1694281 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -664,6 +664,19 @@ describe('URL utility', () => {
});
});
+ describe('cleanLeadingSeparator', () => {
+ it.each`
+ path | expected
+ ${'/foo/bar'} | ${'foo/bar'}
+ ${'foo/bar'} | ${'foo/bar'}
+ ${'//foo/bar'} | ${'foo/bar'}
+ ${'/./foo/bar'} | ${'./foo/bar'}
+ ${''} | ${''}
+ `('$path becomes $expected', ({ path, expected }) => {
+ expect(urlUtils.cleanLeadingSeparator(path)).toBe(expected);
+ });
+ });
+
describe('joinPaths', () => {
it.each`
paths | expected
@@ -688,6 +701,18 @@ describe('URL utility', () => {
});
});
+ describe('stripFinalUrlSegment', () => {
+ it.each`
+ path | expected
+ ${'http://fake.domain/twitter/typeahead-js/-/tags/v0.11.0'} | ${'http://fake.domain/twitter/typeahead-js/-/tags/'}
+ ${'http://fake.domain/bar/cool/-/nested/content'} | ${'http://fake.domain/bar/cool/-/nested/'}
+ ${'http://fake.domain/bar/cool?q="search"'} | ${'http://fake.domain/bar/'}
+ ${'http://fake.domain/bar/cool#link-to-something'} | ${'http://fake.domain/bar/'}
+ `('stripFinalUrlSegment $path => $expected', ({ path, expected }) => {
+ expect(urlUtils.stripFinalUrlSegment(path)).toBe(expected);
+ });
+ });
+
describe('escapeFileUrl', () => {
it('encodes URL excluding the slashes', () => {
expect(urlUtils.escapeFileUrl('/foo-bar/file.md')).toBe('/foo-bar/file.md');
@@ -787,4 +812,36 @@ describe('URL utility', () => {
expect(urlUtils.getHTTPProtocol(url)).toBe(expectation);
});
});
+
+ describe('stripPathTail', () => {
+ it.each`
+ path | expected
+ ${''} | ${''}
+ ${'index.html'} | ${''}
+ ${'/'} | ${'/'}
+ ${'/foo/bar'} | ${'/foo/'}
+ ${'/foo/bar/'} | ${'/foo/bar/'}
+ ${'/foo/bar/index.html'} | ${'/foo/bar/'}
+ `('strips the filename from $path => $expected', ({ path, expected }) => {
+ expect(urlUtils.stripPathTail(path)).toBe(expected);
+ });
+ });
+
+ describe('getURLOrigin', () => {
+ it('when no url passed, returns correct origin from window location', () => {
+ const origin = 'https://foo.bar';
+
+ setWindowLocation({ origin });
+ expect(urlUtils.getURLOrigin()).toBe(origin);
+ });
+
+ it.each`
+ url | expectation
+ ${'not-a-url'} | ${null}
+ ${'wss://example.com'} | ${'wss://example.com'}
+ ${'https://foo.bar/foo/bar'} | ${'https://foo.bar'}
+ `('returns correct origin for $url', ({ url, expectation }) => {
+ expect(urlUtils.getURLOrigin(url)).toBe(expectation);
+ });
+ });
});
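
Several small URL helpers gain table-driven coverage in this file. For orientation, sketches consistent with the cleanLeadingSeparator and getURLOrigin tables above (implementations inferred from the expected values, so treat them as assumptions about url_utility.js):

    // Strip one or more leading slashes: '//foo/bar' -> 'foo/bar'.
    const cleanLeadingSeparator = path => path.replace(/^\/+/, '');

    // Origin of the given URL, or null when it cannot be parsed;
    // falls back to window.location.origin when no URL is passed.
    const getURLOrigin = url => {
      if (!url) {
        return window.location.origin;
      }
      try {
        return new URL(url).origin;
      } catch (e) {
        return null;
      }
    };
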
diff --git a/spec/frontend/merge_request_spec.js b/spec/frontend/merge_request_spec.js
index 16f04d032fd..37509f77f71 100644
--- a/spec/frontend/merge_request_spec.js
+++ b/spec/frontend/merge_request_spec.js
@@ -3,8 +3,6 @@ import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'spec/test_constants';
import axios from '~/lib/utils/axios_utils';
import MergeRequest from '~/merge_request';
-import CloseReopenReportToggle from '~/close_reopen_report_toggle';
-import IssuablesHelper from '~/helpers/issuables_helper';
describe('MergeRequest', () => {
const test = {};
@@ -112,66 +110,7 @@ describe('MergeRequest', () => {
});
});
- describe('class constructor', () => {
- beforeEach(() => {
- jest.spyOn($, 'ajax').mockImplementation();
- });
-
- it('calls .initCloseReopenReport', () => {
- jest.spyOn(IssuablesHelper, 'initCloseReopenReport').mockImplementation(() => {});
-
- new MergeRequest(); // eslint-disable-line no-new
-
- expect(IssuablesHelper.initCloseReopenReport).toHaveBeenCalled();
- });
-
- it('calls .initDroplab', () => {
- const container = {
- querySelector: jest.fn().mockName('container.querySelector'),
- };
- const dropdownTrigger = {};
- const dropdownList = {};
- const button = {};
-
- jest.spyOn(CloseReopenReportToggle.prototype, 'initDroplab').mockImplementation(() => {});
- jest.spyOn(document, 'querySelector').mockReturnValue(container);
-
- container.querySelector
- .mockReturnValueOnce(dropdownTrigger)
- .mockReturnValueOnce(dropdownList)
- .mockReturnValueOnce(button);
-
- new MergeRequest(); // eslint-disable-line no-new
-
- expect(document.querySelector).toHaveBeenCalledWith('.js-issuable-close-dropdown');
- expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-toggle');
- expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-menu');
- expect(container.querySelector).toHaveBeenCalledWith('.js-issuable-close-button');
- expect(CloseReopenReportToggle.prototype.initDroplab).toHaveBeenCalled();
- });
- });
-
describe('hideCloseButton', () => {
- describe('merge request of another user', () => {
- beforeEach(() => {
- loadFixtures('merge_requests/merge_request_with_task_list.html');
- test.el = document.querySelector('.js-issuable-actions');
- new MergeRequest(); // eslint-disable-line no-new
- MergeRequest.hideCloseButton();
- });
-
- it('hides the dropdown close item and selects the next item', () => {
- const closeItem = test.el.querySelector('li.close-item');
- const smallCloseItem = test.el.querySelector('.js-close-item');
- const reportItem = test.el.querySelector('li.report-item');
-
- expect(closeItem).toHaveClass('hidden');
- expect(smallCloseItem).toHaveClass('hidden');
- expect(reportItem).toHaveClass('droplab-item-selected');
- expect(reportItem).not.toHaveClass('hidden');
- });
- });
-
describe('merge request of current_user', () => {
beforeEach(() => {
loadFixtures('merge_requests/merge_request_of_current_user.html');
@@ -180,10 +119,8 @@ describe('MergeRequest', () => {
});
it('hides the close button', () => {
- const closeButton = test.el.querySelector('.btn-close');
const smallCloseItem = test.el.querySelector('.js-close-item');
- expect(closeButton).toHaveClass('hidden');
expect(smallCloseItem).toHaveClass('hidden');
});
});
diff --git a/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
index c30fb572826..9b2aa3a5b5b 100644
--- a/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/group_empty_state_spec.js.snap
@@ -1,79 +1,146 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`GroupEmptyState Renders an empty state for BAD_QUERY 1`] = `
-<gl-empty-state-stub
- compact="true"
- primarybuttonlink="/path/to/settings"
- primarybuttontext="Verify configuration"
- svgpath="/path/to/empty-group-illustration.svg"
- title="Query cannot be processed"
-/>
+exports[`GroupEmptyState given state BAD_QUERY passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": null,
+ "primaryButtonLink": "/path/to/settings",
+ "primaryButtonText": "Verify configuration",
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Query cannot be processed",
+}
`;
-exports[`GroupEmptyState Renders an empty state for BAD_QUERY 2`] = `"The Prometheus server responded with \\"bad request\\". Please check your queries are correct and are supported in your Prometheus version. <a href=\\"/path/to/docs\\">More information</a>"`;
+exports[`GroupEmptyState given state BAD_QUERY renders the slotted content 1`] = `
+<div>
+ <div>
+ The Prometheus server responded with "bad request". Please check your queries are correct and are supported in your Prometheus version.
+ <a
+ href="/path/to/docs"
+ >
+ More information
+ </a>
+ </div>
+</div>
+`;
-exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="We couldn't reach the Prometheus server. Either the server no longer exists or the configuration details need updating."
- primarybuttonlink="/path/to/settings"
- primarybuttontext="Verify configuration"
- svgpath="/path/to/empty-group-illustration.svg"
- title="Connection failed"
-/>
+exports[`GroupEmptyState given state CONNECTION_FAILED passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "We couldn't reach the Prometheus server. Either the server no longer exists or the configuration details need updating.",
+ "primaryButtonLink": "/path/to/settings",
+ "primaryButtonText": "Verify configuration",
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Connection failed",
+}
`;
-exports[`GroupEmptyState Renders an empty state for CONNECTION_FAILED 2`] = `undefined`;
+exports[`GroupEmptyState given state CONNECTION_FAILED renders the slotted content 1`] = `<div />`;
-exports[`GroupEmptyState Renders an empty state for FOO STATE 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="An error occurred while loading the data. Please try again."
- svgpath="/path/to/empty-group-illustration.svg"
- title="An error has occurred"
-/>
+exports[`GroupEmptyState given state FOO STATE passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "An error occurred while loading the data. Please try again.",
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "An error has occurred",
+}
`;
-exports[`GroupEmptyState Renders an empty state for FOO STATE 2`] = `undefined`;
+exports[`GroupEmptyState given state FOO STATE renders the slotted content 1`] = `<div />`;
-exports[`GroupEmptyState Renders an empty state for LOADING 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available."
- svgpath="/path/to/empty-group-illustration.svg"
- title="Waiting for performance data"
-/>
+exports[`GroupEmptyState given state LOADING passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "Creating graphs uses the data from the Prometheus server. If this takes a long time, ensure that data is available.",
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Waiting for performance data",
+}
`;
-exports[`GroupEmptyState Renders an empty state for LOADING 2`] = `undefined`;
+exports[`GroupEmptyState given state LOADING renders the slotted content 1`] = `<div />`;
-exports[`GroupEmptyState Renders an empty state for NO_DATA 1`] = `
-<gl-empty-state-stub
- compact="true"
- svgpath="/path/to/empty-group-illustration.svg"
- title="No data to display"
-/>
+exports[`GroupEmptyState given state NO_DATA passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": null,
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "No data to display",
+}
`;
-exports[`GroupEmptyState Renders an empty state for NO_DATA 2`] = `"The data source is connected, but there is no data to display. <a href=\\"/path/to/docs\\">More information</a>"`;
+exports[`GroupEmptyState given state NO_DATA renders the slotted content 1`] = `
+<div>
+ <div>
+ The data source is connected, but there is no data to display.
+ <a
+ href="/path/to/docs"
+ >
+ More information
+ </a>
+ </div>
+</div>
+`;
-exports[`GroupEmptyState Renders an empty state for TIMEOUT 1`] = `
-<gl-empty-state-stub
- compact="true"
- svgpath="/path/to/empty-group-illustration.svg"
- title="Connection timed out"
-/>
+exports[`GroupEmptyState given state TIMEOUT passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": null,
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "Connection timed out",
+}
`;
-exports[`GroupEmptyState Renders an empty state for TIMEOUT 2`] = `"Charts can't be displayed as the request for data has timed out. <a href=\\"/path/to/docs\\">More information</a>"`;
+exports[`GroupEmptyState given state TIMEOUT renders the slotted content 1`] = `
+<div>
+ <div>
+ Charts can't be displayed as the request for data has timed out.
+ <a
+ href="/path/to/docs"
+ >
+ More information
+ </a>
+ </div>
+</div>
+`;
-exports[`GroupEmptyState Renders an empty state for UNKNOWN_ERROR 1`] = `
-<gl-empty-state-stub
- compact="true"
- description="An error occurred while loading the data. Please try again."
- svgpath="/path/to/empty-group-illustration.svg"
- title="An error has occurred"
-/>
+exports[`GroupEmptyState given state UNKNOWN_ERROR passes the expected props to GlEmptyState 1`] = `
+Object {
+ "compact": true,
+ "description": "An error occurred while loading the data. Please try again.",
+ "primaryButtonLink": null,
+ "primaryButtonText": null,
+ "secondaryButtonLink": null,
+ "secondaryButtonText": null,
+ "svgHeight": null,
+ "svgPath": "/path/to/empty-group-illustration.svg",
+ "title": "An error has occurred",
+}
`;
-exports[`GroupEmptyState Renders an empty state for UNKNOWN_ERROR 2`] = `undefined`;
+exports[`GroupEmptyState given state UNKNOWN_ERROR renders the slotted content 1`] = `<div />`;
diff --git a/spec/frontend/monitoring/components/group_empty_state_spec.js b/spec/frontend/monitoring/components/group_empty_state_spec.js
index 90bd6f67196..3b94c4c6806 100644
--- a/spec/frontend/monitoring/components/group_empty_state_spec.js
+++ b/spec/frontend/monitoring/components/group_empty_state_spec.js
@@ -1,7 +1,13 @@
+import { GlEmptyState } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
import { metricStates } from '~/monitoring/constants';
+const MockGlEmptyState = {
+ props: GlEmptyState.props,
+ template: '<div><slot name="description"></slot></div>',
+};
+
function createComponent(props) {
return shallowMount(GroupEmptyState, {
propsData: {
@@ -10,11 +16,20 @@ function createComponent(props) {
settingsPath: '/path/to/settings',
svgPath: '/path/to/empty-group-illustration.svg',
},
+ stubs: {
+ GlEmptyState: MockGlEmptyState,
+ },
});
}
describe('GroupEmptyState', () => {
- const supportedStates = [
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each([
metricStates.NO_DATA,
metricStates.TIMEOUT,
metricStates.CONNECTION_FAILED,
@@ -22,13 +37,17 @@ describe('GroupEmptyState', () => {
metricStates.LOADING,
metricStates.UNKNOWN_ERROR,
'FOO STATE', // does not fail with unknown states
- ];
+ ])('given state %s', selectedState => {
+ beforeEach(() => {
+ wrapper = createComponent({ selectedState });
+ });
- it.each(supportedStates)('Renders an empty state for %s', selectedState => {
- const wrapper = createComponent({ selectedState });
+ it('renders the slotted content', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
- expect(wrapper.element).toMatchSnapshot();
- // slot is not rendered by the stub, test it separately
- expect(wrapper.vm.currentState.slottedDescription).toMatchSnapshot();
+ it('passes the expected props to GlEmptyState', () => {
+ expect(wrapper.find(MockGlEmptyState).props()).toMatchSnapshot();
+ });
});
});
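
The rework above relies on a hand-rolled stub: shallowMount's automatic stubs do not render slot content, which is why the old spec had to snapshot slottedDescription separately. Giving the stub the real component's props plus a template that renders the slot lets one spec cover both. The pattern in isolation (generic names, not part of this spec):

    // Keep the real component's prop contract but render one named slot.
    const stubWithSlot = (component, slotName) => ({
      props: component.props,
      template: `<div><slot name="${slotName}"></slot></div>`,
    });
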
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
index affd6c1d1d2..d82590c7e9e 100644
--- a/spec/frontend/notes/components/discussion_counter_spec.js
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlIcon } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import notesModule from '~/notes/stores/modules';
import DiscussionCounter from '~/notes/components/discussion_counter.vue';
import { noteableDataMock, discussionMock, notesDataMock, userDataMock } from '../mock_data';
@@ -9,6 +9,7 @@ import * as types from '~/notes/stores/mutation_types';
describe('DiscussionCounter component', () => {
let store;
let wrapper;
+ let setExpandDiscussionsFn;
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -16,6 +17,7 @@ describe('DiscussionCounter component', () => {
beforeEach(() => {
window.mrTabs = {};
const { state, getters, mutations, actions } = notesModule();
+ setExpandDiscussionsFn = jest.fn().mockImplementation(actions.setExpandDiscussions);
store = new Vuex.Store({
state: {
@@ -24,7 +26,10 @@ describe('DiscussionCounter component', () => {
},
getters,
mutations,
- actions,
+ actions: {
+ ...actions,
+ setExpandDiscussions: setExpandDiscussionsFn,
+ },
});
store.dispatch('setNoteableData', {
...noteableDataMock,
@@ -84,7 +89,7 @@ describe('DiscussionCounter component', () => {
wrapper = shallowMount(DiscussionCounter, { store, localVue });
expect(wrapper.find(`.is-active`).exists()).toBe(isActive);
- expect(wrapper.findAll('[role="group"').length).toBe(groupLength);
+ expect(wrapper.findAll(GlButton)).toHaveLength(groupLength);
});
});
@@ -103,23 +108,22 @@ describe('DiscussionCounter component', () => {
it('calls button handler when clicked', () => {
updateStoreWithExpanded(true);
- wrapper.setMethods({ handleExpandDiscussions: jest.fn() });
- toggleAllButton.trigger('click');
+ toggleAllButton.vm.$emit('click');
- expect(wrapper.vm.handleExpandDiscussions).toHaveBeenCalledTimes(1);
+ expect(setExpandDiscussionsFn).toHaveBeenCalledTimes(1);
});
it('collapses all discussions if expanded', () => {
updateStoreWithExpanded(true);
expect(wrapper.vm.allExpanded).toBe(true);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-up');
+ expect(toggleAllButton.props('icon')).toBe('angle-up');
- toggleAllButton.trigger('click');
+ toggleAllButton.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.allExpanded).toBe(false);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-down');
+ expect(toggleAllButton.props('icon')).toBe('angle-down');
});
});
@@ -127,13 +131,13 @@ describe('DiscussionCounter component', () => {
updateStoreWithExpanded(false);
expect(wrapper.vm.allExpanded).toBe(false);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-down');
+ expect(toggleAllButton.props('icon')).toBe('angle-down');
- toggleAllButton.trigger('click');
+ toggleAllButton.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.allExpanded).toBe(true);
- expect(toggleAllButton.find(GlIcon).props().name).toBe('angle-up');
+ expect(toggleAllButton.props('icon')).toBe('angle-up');
});
});
});
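
The change above swaps method stubbing for an action spy: one Vuex action is wrapped in jest.fn().mockImplementation so its behaviour still runs while the spec can count dispatches. The same pattern as a reusable helper (illustrative, not part of the spec):

    // Spy on a single Vuex action while preserving its original behaviour.
    const spyOnAction = (actions, name) => {
      const spy = jest.fn().mockImplementation(actions[name]);
      return { spy, actions: { ...actions, [name]: spy } };
    };
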
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index 91ff796b9de..e3e3518fd31 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -74,13 +74,15 @@ describe('DiscussionFilter component', () => {
});
it('renders the all filters', () => {
- expect(wrapper.findAll('.dropdown-menu li').length).toBe(discussionFiltersMock.length);
+ expect(wrapper.findAll('.discussion-filter-container .dropdown-item').length).toBe(
+ discussionFiltersMock.length,
+ );
});
it('renders the default selected item', () => {
expect(
wrapper
- .find('#discussion-filter-dropdown')
+ .find('#discussion-filter-dropdown .dropdown-item')
.text()
.trim(),
).toBe(discussionFiltersMock[0].title);
@@ -88,7 +90,7 @@ describe('DiscussionFilter component', () => {
it('updates to the selected item', () => {
const filterItem = wrapper.find(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.HISTORY}"] button`,
+ `.discussion-filter-container .dropdown-item[data-filter-type="${DISCUSSION_FILTER_TYPES.HISTORY}"]`,
);
filterItem.trigger('click');
@@ -98,7 +100,9 @@ describe('DiscussionFilter component', () => {
it('only updates when selected filter changes', () => {
wrapper
- .find(`.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"] button`)
+ .find(
+ `.discussion-filter-container .dropdown-item[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"]`,
+ )
.trigger('click');
expect(filterDiscussion).not.toHaveBeenCalled();
@@ -106,7 +110,7 @@ describe('DiscussionFilter component', () => {
it('disables commenting when "Show history only" filter is applied', () => {
const filterItem = wrapper.find(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.HISTORY}"] button`,
+ `.discussion-filter-container .dropdown-item[data-filter-type="${DISCUSSION_FILTER_TYPES.HISTORY}"]`,
);
filterItem.trigger('click');
@@ -115,7 +119,7 @@ describe('DiscussionFilter component', () => {
it('enables commenting when "Show history only" filter is not applied', () => {
const filterItem = wrapper.find(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"] button`,
+ `.discussion-filter-container .dropdown-item[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"]`,
);
filterItem.trigger('click');
@@ -124,10 +128,10 @@ describe('DiscussionFilter component', () => {
it('renders a dropdown divider for the default filter', () => {
const defaultFilter = wrapper.findAll(
- `.dropdown-menu li[data-filter-type="${DISCUSSION_FILTER_TYPES.ALL}"] > *`,
+ `.discussion-filter-container .dropdown-item-wrapper > *`,
);
- expect(defaultFilter.at(defaultFilter.length - 1).classes('dropdown-divider')).toBe(true);
+ expect(defaultFilter.at(1).classes('gl-new-dropdown-divider')).toBe(true);
});
describe('Merge request tabs', () => {
diff --git a/spec/frontend/notes/components/sort_discussion_spec.js b/spec/frontend/notes/components/sort_discussion_spec.js
index 575f1057db2..49b85d60a27 100644
--- a/spec/frontend/notes/components/sort_discussion_spec.js
+++ b/spec/frontend/notes/components/sort_discussion_spec.js
@@ -55,7 +55,7 @@ describe('Sort Discussion component', () => {
it('calls the right actions', () => {
createComponent();
- wrapper.find('.js-newest-first').trigger('click');
+ wrapper.find('.js-newest-first').vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', DESC);
expect(Tracking.event).toHaveBeenCalledWith(undefined, 'change_discussion_sort_direction', {
@@ -67,7 +67,7 @@ describe('Sort Discussion component', () => {
it('shows the "Oldest First" as the dropdown', () => {
createComponent();
- expect(wrapper.find('.js-dropdown-text').text()).toBe('Oldest first');
+ expect(wrapper.find('.js-dropdown-text').props('text')).toBe('Oldest first');
});
});
@@ -79,7 +79,7 @@ describe('Sort Discussion component', () => {
describe('when the dropdown item is clicked', () => {
it('calls the right actions', () => {
- wrapper.find('.js-oldest-first').trigger('click');
+ wrapper.find('.js-oldest-first').vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith('setDiscussionSortDirection', ASC);
expect(Tracking.event).toHaveBeenCalledWith(undefined, 'change_discussion_sort_direction', {
@@ -87,13 +87,13 @@ describe('Sort Discussion component', () => {
});
});
- it('applies the active class to the correct button in the dropdown', () => {
- expect(wrapper.find('.js-newest-first').classes()).toContain('is-active');
+ it('sets is-checked to true on the active button in the dropdown', () => {
+ expect(wrapper.find('.js-newest-first').props('isChecked')).toBe(true);
});
});
it('shows the "Newest First" as the dropdown', () => {
- expect(wrapper.find('.js-dropdown-text').text()).toBe('Newest first');
+ expect(wrapper.find('.js-dropdown-text').props('text')).toBe('Newest first');
});
});
});
diff --git a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
index 4d9e0af1545..d317264bdae 100644
--- a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
+++ b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
@@ -2,151 +2,163 @@
exports[`PackageTitle renders with tags 1`] = `
<div
- class="gl-display-flex gl-justify-content-space-between gl-py-3"
+ class="gl-display-flex gl-flex-direction-column"
data-qa-selector="package_title"
>
<div
- class="gl-flex-direction-column"
+ class="gl-display-flex gl-justify-content-space-between gl-py-3"
>
<div
- class="gl-display-flex"
+ class="gl-flex-direction-column"
>
- <!---->
-
<div
- class="gl-display-flex gl-flex-direction-column"
+ class="gl-display-flex"
>
- <h1
- class="gl-font-size-h1 gl-mt-3 gl-mb-2"
- data-testid="title"
- >
- Test package
- </h1>
+ <!---->
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ class="gl-display-flex gl-flex-direction-column"
>
- <gl-icon-stub
- class="gl-mr-3"
- name="eye"
- size="16"
- />
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ data-testid="title"
+ >
+ Test package
+ </h1>
- <gl-sprintf-stub
- message="v%{version} published %{timeAgo}"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <gl-sprintf-stub
+ message="v%{version} published %{timeAgo}"
+ />
+ </div>
</div>
</div>
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
- >
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-type"
- icon="package"
- link=""
- size="s"
- text="maven"
- />
- </div>
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-size"
- icon="disk"
- link=""
- size="s"
- text="300 bytes"
- />
- </div>
+
<div
- class="gl-display-flex gl-align-items-center gl-mr-5"
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
>
- <package-tags-stub
- hidelabel="true"
- tagdisplaylimit="2"
- tags="[object Object],[object Object],[object Object],[object Object]"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-type"
+ icon="package"
+ link=""
+ size="s"
+ text="maven"
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-size"
+ icon="disk"
+ link=""
+ size="s"
+ text="300 bytes"
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <package-tags-stub
+ hidelabel="true"
+ tagdisplaylimit="2"
+ tags="[object Object],[object Object],[object Object],[object Object]"
+ />
+ </div>
</div>
</div>
+
+ <!---->
</div>
- <!---->
+ <p />
</div>
`;
exports[`PackageTitle renders without tags 1`] = `
<div
- class="gl-display-flex gl-justify-content-space-between gl-py-3"
+ class="gl-display-flex gl-flex-direction-column"
data-qa-selector="package_title"
>
<div
- class="gl-flex-direction-column"
+ class="gl-display-flex gl-justify-content-space-between gl-py-3"
>
<div
- class="gl-display-flex"
+ class="gl-flex-direction-column"
>
- <!---->
-
<div
- class="gl-display-flex gl-flex-direction-column"
+ class="gl-display-flex"
>
- <h1
- class="gl-font-size-h1 gl-mt-3 gl-mb-2"
- data-testid="title"
- >
- Test package
- </h1>
+ <!---->
<div
- class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ class="gl-display-flex gl-flex-direction-column"
>
- <gl-icon-stub
- class="gl-mr-3"
- name="eye"
- size="16"
- />
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ data-testid="title"
+ >
+ Test package
+ </h1>
- <gl-sprintf-stub
- message="v%{version} published %{timeAgo}"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-1"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <gl-sprintf-stub
+ message="v%{version} published %{timeAgo}"
+ />
+ </div>
</div>
</div>
- </div>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
- >
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <metadata-item-stub
- data-testid="package-type"
- icon="package"
- link=""
- size="s"
- text="maven"
- />
- </div>
+
<div
- class="gl-display-flex gl-align-items-center gl-mr-5"
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mt-3"
>
- <metadata-item-stub
- data-testid="package-size"
- icon="disk"
- link=""
- size="s"
- text="300 bytes"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-type"
+ icon="package"
+ link=""
+ size="s"
+ text="maven"
+ />
+ </div>
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <metadata-item-stub
+ data-testid="package-size"
+ icon="disk"
+ link=""
+ size="s"
+ text="300 bytes"
+ />
+ </div>
</div>
</div>
+
+ <!---->
</div>
- <!---->
+ <p />
</div>
`;
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
index 0e95ee4cfd3..06e5950eb5d 100644
--- a/spec/frontend/packages/details/store/getters_spec.js
+++ b/spec/frontend/packages/details/store/getters_spec.js
@@ -69,7 +69,7 @@ describe('Getters PackageDetails Store', () => {
const nugetInstallationCommandStr = `nuget install ${nugetPackage.name} -Source "GitLab"`;
const nugetSetupCommandStr = `nuget source Add -Name "GitLab" -Source "${registryUrl}" -UserName <your_username> -Password <your_token>`;
- const pypiPipCommandStr = `pip install ${pypiPackage.name} --index-url ${registryUrl}`;
+ const pypiPipCommandStr = `pip install ${pypiPackage.name} --extra-index-url ${registryUrl}`;
const composerRegistryIncludeStr = '{"type":"composer","url":"foo"}';
const composerPackageIncludeStr = JSON.stringify({
[packageWithoutBuildInfo.name]: packageWithoutBuildInfo.version,
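
The expected pip command above now uses --extra-index-url, which adds the GitLab registry on top of the default PyPI index instead of replacing it, so dependencies that only exist on pypi.org still resolve. Mirroring the asserted string as a helper (names are placeholders, not the store getter itself):

    // `--extra-index-url` keeps pypi.org as a fallback; `--index-url` would not.
    const pypiPipCommand = (name, registryUrl) => `pip install ${name} --extra-index-url ${registryUrl}`;
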
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
index 6ff9376565a..794e583a487 100644
--- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -1,457 +1,463 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`packages_list_app renders 1`] = `
-<b-tabs-stub
- activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
- class="gl-tabs"
- contentclass=",gl-tab-content"
- navclass="gl-tabs-nav"
- nofade="true"
- nonavstyle="true"
- tag="div"
->
- <template>
-
- <b-tab-stub
- tag="div"
- title="All"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+<div>
+ <package-title-stub
+ packagehelpurl="foo"
+ />
+
+ <b-tabs-stub
+ activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
+ class="gl-tabs"
+ contentclass=",gl-tab-content"
+ navclass="gl-tabs-nav"
+ nofade="true"
+ nonavstyle="true"
+ tag="div"
+ >
+ <template>
+
+ <b-tab-stub
+ tag="div"
+ title="All"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Composer"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Composer"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no Composer packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Composer packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no Composer packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no Composer packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Conan"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Conan"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no Conan packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Conan packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no Conan packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no Conan packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="Maven"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Maven"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no Maven packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Maven packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no Maven packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no Maven packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="NPM"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="NPM"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no NPM packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no NPM packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no NPM packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no NPM packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="NuGet"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="NuGet"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no NuGet packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no NuGet packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no NuGet packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no NuGet packages yet
+ </h1>
- <!---->
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
- <b-tab-stub
- tag="div"
- title="PyPi"
- titlelinkclass="gl-tab-nav-item"
- >
- <template>
- <div>
- <section
- class="row empty-state text-center"
- >
- <div
- class="col-12"
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="PyPi"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
>
<div
- class="svg-250 svg-content"
+ class="col-12"
>
- <img
- alt="There are no PyPi packages yet"
- class="gl-max-w-full"
- src="helpSvg"
- />
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no PyPi packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
</div>
- </div>
-
- <div
- class="col-12"
- >
+
<div
- class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ class="col-12"
>
- <h1
- class="h4"
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
>
- There are no PyPi packages yet
- </h1>
-
- <p>
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="helpUrl"
- routertag="a"
- target="_blank"
+ <h1
+ class="h4"
>
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div>
- <!---->
+ There are no PyPi packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
- <!---->
+ <div>
+ <!---->
+
+ <!---->
+ </div>
</div>
</div>
- </div>
- </section>
- </div>
- </template>
- </b-tab-stub>
-
- <!---->
- </template>
- <template>
- <div
- class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
- >
- <package-filter-stub
- class="mr-1"
- />
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
- <package-sort-stub />
- </div>
- </template>
-</b-tabs-stub>
+ <!---->
+ </template>
+ <template>
+ <div
+ class="gl-display-flex gl-align-self-center gl-py-2 gl-flex-grow-1 gl-justify-content-end"
+ >
+ <package-filter-stub
+ class="gl-mr-2"
+ />
+
+ <package-sort-stub />
+ </div>
+ </template>
+ </b-tabs-stub>
+</div>
`;
diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js
index 19ff4290f50..217096f822a 100644
--- a/spec/frontend/packages/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages/list/components/packages_list_app_spec.js
@@ -36,6 +36,7 @@ describe('packages_list_app', () => {
resourceId: 'project_id',
emptyListIllustration: 'helpSvg',
emptyListHelpUrl,
+ packageHelpUrl: 'foo',
},
filterQuery,
},
diff --git a/spec/frontend/packages/list/components/packages_title_spec.js b/spec/frontend/packages/list/components/packages_title_spec.js
new file mode 100644
index 00000000000..5e9ebd8ecb0
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_title_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import PackageTitle from '~/packages/list/components/package_title.vue';
+import TitleArea from '~/vue_shared/components/registry/title_area.vue';
+import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue';
+import { LIST_INTRO_TEXT, LIST_TITLE_TEXT } from '~/packages/list/constants';
+
+describe('PackageTitle', () => {
+ let wrapper;
+ let store;
+
+ const findTitleArea = () => wrapper.find(TitleArea);
+ const findMetadataItem = () => wrapper.find(MetadataItem);
+
+ const mountComponent = (propsData = { packageHelpUrl: 'foo' }) => {
+ wrapper = shallowMount(PackageTitle, {
+ store,
+ propsData,
+ stubs: {
+ TitleArea,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('title area', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findTitleArea().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ mountComponent();
+
+ expect(findTitleArea().props()).toMatchObject({
+ title: LIST_TITLE_TEXT,
+ infoMessages: [{ text: LIST_INTRO_TEXT, link: 'foo' }],
+ });
+ });
+ });
+
+ describe.each`
+ packagesCount | exist | text
+ ${null} | ${false} | ${''}
+ ${undefined} | ${false} | ${''}
+ ${0} | ${true} | ${'0 Packages'}
+ ${1} | ${true} | ${'1 Package'}
+ ${2} | ${true} | ${'2 Packages'}
+ `('when packagesCount is $packagesCount metadata item', ({ packagesCount, exist, text }) => {
+ beforeEach(() => {
+ mountComponent({ packagesCount, packageHelpUrl: 'foo' });
+ });
+
+ it(`is ${exist} that it exists`, () => {
+ expect(findMetadataItem().exists()).toBe(exist);
+ });
+
+ if (exist) {
+ it('has the correct props', () => {
+ expect(findMetadataItem().props()).toMatchObject({
+ icon: 'package',
+ text,
+ });
+ });
+ }
+ });
+});
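
The metadata-item table above expects count-aware labels ('0 Packages', '1 Package', '2 Packages'). A hedged sketch of how such a label could be derived with GitLab's n__ pluralisation helper — the component's actual computed property is an assumption here:

    import { n__ } from '~/locale';

    // Hypothetical label builder producing the strings asserted above.
    const packageAmountText = packagesCount => n__('%d Package', '%d Packages', packagesCount);
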
diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
index 6aaefed92d0..5faae5690db 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
@@ -52,27 +52,6 @@ exports[`packages_list_row renders 1`] = `
<!---->
<div
- class="gl-display-flex gl-align-items-center"
- >
- <gl-icon-stub
- class="gl-ml-3 gl-mr-2 gl-min-w-0"
- name="review-list"
- size="16"
- />
-
- <gl-link-stub
- class="gl-text-body gl-min-w-0"
- data-testid="packages-row-project"
- href="/foo/bar/baz"
- >
- <gl-truncate-stub
- position="end"
- text="foo/bar/baz"
- />
- </gl-link-stub>
- </div>
-
- <div
class="d-flex align-items-center"
data-testid="package-type"
>
@@ -86,6 +65,10 @@ exports[`packages_list_row renders 1`] = `
Maven
</span>
</div>
+
+ <package-path-stub
+ path="foo/bar/baz"
+ />
</div>
</div>
</div>
@@ -118,6 +101,7 @@ exports[`packages_list_row renders 1`] = `
>
<gl-button-stub
aria-label="Remove package"
+ buttontextclasses=""
category="primary"
data-testid="action-delete"
icon="remove"
diff --git a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
index 9a0c52cee47..acdf7c49ebd 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
@@ -32,7 +32,8 @@ exports[`publish_method renders 1`] = `
</gl-link-stub>
<clipboard-button-stub
- cssclass="gl-border-0 gl-py-0 gl-px-2"
+ category="tertiary"
+ size="small"
text="sha-baz"
title="Copy commit SHA"
tooltipplacement="top"
diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages/shared/components/package_list_row_spec.js
index f4eabf7bb67..0d0ea4e2122 100644
--- a/spec/frontend/packages/shared/components/package_list_row_spec.js
+++ b/spec/frontend/packages/shared/components/package_list_row_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
import PackageTags from '~/packages/shared/components/package_tags.vue';
+import PackagePath from '~/packages/shared/components/package_path.vue';
import ListItem from '~/vue_shared/components/registry/list_item.vue';
import { packageList } from '../../mock_data';
@@ -11,7 +12,7 @@ describe('packages_list_row', () => {
const [packageWithoutTags, packageWithTags] = packageList;
const findPackageTags = () => wrapper.find(PackageTags);
- const findProjectLink = () => wrapper.find('[data-testid="packages-row-project"]');
+ const findPackagePath = () => wrapper.find(PackagePath);
const findDeleteButton = () => wrapper.find('[data-testid="action-delete"]');
const findPackageType = () => wrapper.find('[data-testid="package-type"]');
@@ -63,8 +64,9 @@ describe('packages_list_row', () => {
mountComponent({ isGroup: true });
});
- it('has project field', () => {
- expect(findProjectLink().exists()).toBe(true);
+ it('has a package path component', () => {
+ expect(findPackagePath().exists()).toBe(true);
+ expect(findPackagePath().props()).toMatchObject({ path: 'foo/bar/baz' });
});
});
diff --git a/spec/frontend/packages/shared/components/package_path_spec.js b/spec/frontend/packages/shared/components/package_path_spec.js
new file mode 100644
index 00000000000..40d455ac77c
--- /dev/null
+++ b/spec/frontend/packages/shared/components/package_path_spec.js
@@ -0,0 +1,86 @@
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import PackagePath from '~/packages/shared/components/package_path.vue';
+
+describe('PackagePath', () => {
+ let wrapper;
+
+ const mountComponent = (propsData = { path: 'foo' }) => {
+ wrapper = shallowMount(PackagePath, {
+ propsData,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const BASE_ICON = 'base-icon';
+ const ROOT_LINK = 'root-link';
+ const ROOT_CHEVRON = 'root-chevron';
+ const ELLIPSIS_ICON = 'ellipsis-icon';
+ const ELLIPSIS_CHEVRON = 'ellipsis-chevron';
+ const LEAF_LINK = 'leaf-link';
+
+ const findItem = name => wrapper.find(`[data-testid="${name}"]`);
+ const findTooltip = w => getBinding(w.element, 'gl-tooltip');
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe.each`
+ path | rootUrl | shouldExist | shouldNotExist
+ ${'foo/bar'} | ${'/foo/bar'} | ${[]} | ${[ROOT_CHEVRON, ELLIPSIS_ICON, ELLIPSIS_CHEVRON, LEAF_LINK]}
+ ${'foo/bar/baz'} | ${'/foo/bar'} | ${[ROOT_CHEVRON, LEAF_LINK]} | ${[ELLIPSIS_ICON, ELLIPSIS_CHEVRON]}
+ ${'foo/bar/baz/baz2'} | ${'/foo/bar'} | ${[ROOT_CHEVRON, LEAF_LINK, ELLIPSIS_ICON, ELLIPSIS_CHEVRON]} | ${[]}
+ ${'foo/bar/baz/baz2/bar2'} | ${'/foo/bar'} | ${[ROOT_CHEVRON, LEAF_LINK, ELLIPSIS_ICON, ELLIPSIS_CHEVRON]} | ${[]}
+ `('given path $path', ({ path, shouldExist, shouldNotExist, rootUrl }) => {
+ const pathPieces = path.split('/').slice(1);
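+ // Per the table above, the ellipsis icon (and its tooltip) only appears for paths with more than three segments.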
+ const hasTooltip = shouldExist.includes(ELLIPSIS_ICON);
+
+ beforeEach(() => {
+ mountComponent({ path });
+ });
+
+ it('should have a base icon', () => {
+ expect(findItem(BASE_ICON).exists()).toBe(true);
+ });
+
+ it('should have a root link', () => {
+ const root = findItem(ROOT_LINK);
+ expect(root.exists()).toBe(true);
+ expect(root.attributes('href')).toBe(rootUrl);
+ });
+
+ if (hasTooltip) {
+ it('should have a tooltip', () => {
+ const tooltip = findTooltip(findItem(ELLIPSIS_ICON));
+ expect(tooltip).toBeDefined();
+ expect(tooltip.value).toMatchObject({
+ title: path,
+ });
+ });
+ }
+
+ if (shouldExist.length) {
+ it.each(shouldExist)(`should have %s`, element => {
+ expect(findItem(element).exists()).toBe(true);
+ });
+ }
+
+ if (shouldNotExist.length) {
+ it.each(shouldNotExist)(`should not have %s`, element => {
+ expect(findItem(element).exists()).toBe(false);
+ });
+ }
+
+ if (shouldExist.includes(LEAF_LINK)) {
+ it('the last link should be the last piece of the path', () => {
+ const leaf = findItem(LEAF_LINK);
+ expect(leaf.attributes('href')).toBe(`/${path}`);
+ expect(leaf.text()).toBe(pathPieces[pathPieces.length - 1]);
+ });
+ }
+ });
+});
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index 2fbc700d4f5..ddeaa2a79db 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -39,6 +39,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
/>
</form>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
size="medium"
@@ -48,6 +49,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
</gl-button-stub>
<gl-button-stub
+ buttontextclasses=""
category="primary"
disabled="true"
icon=""
@@ -60,6 +62,7 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
</gl-button-stub>
<gl-button-stub
+ buttontextclasses=""
category="primary"
disabled="true"
icon=""
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
index 5a61f9fca69..5da998d9d2d 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/pipeline_schedule_callout_spec.js
@@ -1,23 +1,18 @@
import Vue from 'vue';
import Cookies from 'js-cookie';
import PipelineSchedulesCallout from '~/pages/projects/pipeline_schedules/shared/components/pipeline_schedules_callout.vue';
-import '~/pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg';
-
-jest.mock(
- '~/pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg',
- () => '<svg></svg>',
-);
const PipelineSchedulesCalloutComponent = Vue.extend(PipelineSchedulesCallout);
const cookieKey = 'pipeline_schedules_callout_dismissed';
const docsUrl = 'help/ci/scheduled_pipelines';
+const imageUrl = 'pages/projects/pipeline_schedules/shared/icons/intro_illustration.svg';
describe('Pipeline Schedule Callout', () => {
let calloutComponent;
beforeEach(() => {
setFixtures(`
- <div id='pipeline-schedules-callout' data-docs-url=${docsUrl}></div>
+ <div id='pipeline-schedules-callout' data-docs-url=${docsUrl} data-image-url=${imageUrl}></div>
`);
});
@@ -30,13 +25,13 @@ describe('Pipeline Schedule Callout', () => {
expect(calloutComponent).toBeDefined();
});
- it('correctly sets illustrationSvg', () => {
- expect(calloutComponent.illustrationSvg).toContain('<svg');
- });
-
it('correctly sets docsUrl', () => {
expect(calloutComponent.docsUrl).toContain(docsUrl);
});
+
+ it('correctly sets imageUrl', () => {
+ expect(calloutComponent.imageUrl).toContain(imageUrl);
+ });
});
describe(`when ${cookieKey} cookie is set`, () => {
@@ -68,8 +63,8 @@ describe('Pipeline Schedule Callout', () => {
expect(calloutComponent.$el.querySelector('.bordered-box')).not.toBeNull();
});
- it('renders the callout svg', () => {
- expect(calloutComponent.$el.outerHTML).toContain('<svg');
+ it('renders the callout img', () => {
+ expect(calloutComponent.$el.outerHTML).toContain('<img');
});
it('renders the callout title', () => {
diff --git a/spec/frontend/pipeline_new/mock_data.js b/spec/frontend/pipeline_new/mock_data.js
index 55286e0ec7e..cdbd6d4437e 100644
--- a/spec/frontend/pipeline_new/mock_data.js
+++ b/spec/frontend/pipeline_new/mock_data.js
@@ -14,9 +14,9 @@ export const mockProjectId = '21';
export const mockPostParams = {
ref: 'tag-1',
- variables: [
- { key: 'test_var', value: 'test_var_val', variable_type: 'env_var' },
- { key: 'test_file', value: 'test_file_val', variable_type: 'file' },
+ variables_attributes: [
+ { key: 'test_var', secret_value: 'test_var_val', variable_type: 'env_var' },
+ { key: 'test_file', secret_value: 'test_file_val', variable_type: 'file' },
],
};
diff --git a/spec/frontend/pipelines/components/dag/dag_spec.js b/spec/frontend/pipelines/components/dag/dag_spec.js
index 989f6c17197..08a43199594 100644
--- a/spec/frontend/pipelines/components/dag/dag_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_spec.js
@@ -4,13 +4,8 @@ import Dag from '~/pipelines/components/dag/dag.vue';
import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
import DagAnnotations from '~/pipelines/components/dag/dag_annotations.vue';
-import {
- ADD_NOTE,
- REMOVE_NOTE,
- REPLACE_NOTES,
- PARSE_FAILURE,
- UNSUPPORTED_DATA,
-} from '~/pipelines/components/dag//constants';
+import { ADD_NOTE, REMOVE_NOTE, REPLACE_NOTES } from '~/pipelines/components/dag/constants';
+import { PARSE_FAILURE, UNSUPPORTED_DATA } from '~/pipelines/constants';
import {
mockParsedGraphQLNodes,
tooSmallGraph,
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index d977db58a0e..062c9759a65 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -3,23 +3,27 @@ import { mount } from '@vue/test-utils';
import { setHTMLFixture } from 'helpers/fixtures';
import PipelineStore from '~/pipelines/stores/pipeline_store';
import graphComponent from '~/pipelines/components/graph/graph_component.vue';
-import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
import linkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
import graphJSON from './mock_data';
import linkedPipelineJSON from './linked_pipelines_mock_data';
import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
describe('graph component', () => {
- const store = new PipelineStore();
- store.storePipeline(linkedPipelineJSON);
- const mediator = new PipelinesMediator({ endpoint: '' });
-
+ let store;
+ let mediator;
let wrapper;
const findExpandPipelineBtn = () => wrapper.find('[data-testid="expandPipelineButton"]');
const findAllExpandPipelineBtns = () => wrapper.findAll('[data-testid="expandPipelineButton"]');
+ const findStageColumns = () => wrapper.findAll(StageColumnComponent);
+ const findStageColumnAt = i => findStageColumns().at(i);
beforeEach(() => {
+ mediator = new PipelinesMediator({ endpoint: '' });
+ store = new PipelineStore();
+ store.storePipeline(linkedPipelineJSON);
+
setHTMLFixture('<div class="layout-page"></div>');
});
@@ -43,7 +47,7 @@ describe('graph component', () => {
});
describe('with data', () => {
- it('should render the graph', () => {
+ beforeEach(() => {
wrapper = mount(graphComponent, {
propsData: {
isLoading: false,
@@ -51,26 +55,17 @@ describe('graph component', () => {
mediator,
},
});
+ });
+ it('renders the graph', () => {
expect(wrapper.find('.js-pipeline-graph').exists()).toBe(true);
-
- expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
-
- expect(
- wrapper
- .findAll(stageColumnComponent)
- .at(1)
- .classes(),
- ).toContain('left-margin');
-
- expect(wrapper.find('.stage-column:nth-child(2) .build:nth-child(1)').classes()).toContain(
- 'left-connector',
- );
-
expect(wrapper.find('.loading-icon').exists()).toBe(false);
-
expect(wrapper.find('.stage-column-list').exists()).toBe(true);
});
+
+ it('renders columns in the graph', () => {
+ expect(findStageColumns()).toHaveLength(graphJSON.details.stages.length);
+ });
});
describe('when linked pipelines are present', () => {
@@ -93,26 +88,26 @@ describe('graph component', () => {
expect(wrapper.find('.fa-spinner').exists()).toBe(false);
});
- it('should include the stage column list', () => {
- expect(wrapper.find(stageColumnComponent).exists()).toBe(true);
- });
-
- it('should include the no-margin class on the first child if there is only one job', () => {
- const firstStageColumnElement = wrapper.find(stageColumnComponent);
-
- expect(firstStageColumnElement.classes()).toContain('no-margin');
+ it('should include the stage column', () => {
+ expect(findStageColumnAt(0).exists()).toBe(true);
});
- it('should include the has-only-one-job class on the first child', () => {
- const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column');
-
- expect(firstStageColumnElement.classes()).toContain('has-only-one-job');
+ it('stage column should have no-margin, gl-mr-26, has-only-one-job classes if there is only one job', () => {
+ expect(findStageColumnAt(0).classes()).toEqual(
+ expect.arrayContaining(['no-margin', 'gl-mr-26', 'has-only-one-job']),
+ );
});
it('should include the left-margin class on the second child', () => {
- const firstStageColumnElement = wrapper.find('.stage-column-list .stage-column:last-child');
+ expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
+ });
- expect(firstStageColumnElement.classes()).toContain('left-margin');
+ it('should include the left-connector class in the build of the second child', () => {
+ expect(
+ findStageColumnAt(1)
+ .find('.build:nth-child(1)')
+ .classes('left-connector'),
+ ).toBe(true);
});
it('should include the js-has-linked-pipelines flag', () => {
@@ -134,12 +129,7 @@ describe('graph component', () => {
describe('stageConnectorClass', () => {
it('it returns left-margin when there is a triggerer', () => {
- expect(
- wrapper
- .findAll(stageColumnComponent)
- .at(1)
- .classes(),
- ).toContain('left-margin');
+ expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
});
});
@@ -248,6 +238,16 @@ describe('graph component', () => {
.catch(done.fail);
});
});
+
+ describe('when column requests a refresh', () => {
+ beforeEach(() => {
+ findStageColumnAt(0).vm.$emit('refreshPipelineGraph');
+ });
+
+ it('refreshPipelineGraph is emitted', () => {
+ expect(wrapper.emitted().refreshPipelineGraph).toHaveLength(1);
+ });
+ });
});
});
});
@@ -268,7 +268,7 @@ describe('graph component', () => {
it('should include the first column with a no margin', () => {
const firstColumn = wrapper.find('.stage-column');
- expect(firstColumn.classes()).toContain('no-margin');
+ expect(firstColumn.classes('no-margin')).toBe(true);
});
it('should not render a linked pipelines column', () => {
@@ -278,16 +278,11 @@ describe('graph component', () => {
describe('stageConnectorClass', () => {
it('it returns no-margin when no triggerer and there is one job', () => {
- expect(wrapper.find(stageColumnComponent).classes()).toContain('no-margin');
+ expect(findStageColumnAt(0).classes('no-margin')).toBe(true);
});
it('it returns left-margin when no triggerer and not the first stage', () => {
- expect(
- wrapper
- .findAll(stageColumnComponent)
- .at(1)
- .classes(),
- ).toContain('left-margin');
+ expect(findStageColumnAt(1).classes('left-margin')).toBe(true);
});
});
});
@@ -302,12 +297,9 @@ describe('graph component', () => {
},
});
- expect(
- wrapper
- .find('.stage-column:nth-child(2) .stage-name')
- .text()
- .trim(),
- ).toEqual('Deploy &lt;img src=x onerror=alert(document.domain)&gt;');
+ expect(findStageColumnAt(1).props('title')).toEqual(
+ 'Deploy &lt;img src=x onerror=alert(document.domain)&gt;',
+ );
});
});
});
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 5388d624d3c..2e10b0f068c 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -1,115 +1,164 @@
import { shallowMount } from '@vue/test-utils';
-import { GlModal } from '@gitlab/ui';
+import { GlModal, GlLoadingIcon } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import {
+ mockCancelledPipelineHeader,
+ mockFailedPipelineHeader,
+ mockRunningPipelineHeader,
+ mockSuccessfulPipelineHeader,
+} from './mock_data';
+import axios from '~/lib/utils/axios_utils';
import HeaderComponent from '~/pipelines/components/header_component.vue';
-import CiHeader from '~/vue_shared/components/header_ci_component.vue';
-import eventHub from '~/pipelines/event_hub';
describe('Pipeline details header', () => {
let wrapper;
let glModalDirective;
-
- const threeWeeksAgo = new Date();
- threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+ let mockAxios;
const findDeleteModal = () => wrapper.find(GlModal);
-
- const defaultProps = {
- pipeline: {
- details: {
- status: {
- group: 'failed',
- icon: 'status_failed',
- label: 'failed',
- text: 'failed',
- details_path: 'path',
- },
- },
- id: 123,
- created_at: threeWeeksAgo.toISOString(),
- user: {
- web_url: 'path',
- name: 'Foo',
- username: 'foobar',
- email: 'foo@bar.com',
- avatar_url: 'link',
- },
- retry_path: 'retry',
- cancel_path: 'cancel',
- delete_path: 'delete',
+ const findRetryButton = () => wrapper.find('[data-testid="retryPipeline"]');
+ const findCancelButton = () => wrapper.find('[data-testid="cancelPipeline"]');
+ const findDeleteButton = () => wrapper.find('[data-testid="deletePipeline"]');
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
+ const defaultProvideOptions = {
+ pipelineId: 14,
+ pipelineIid: 1,
+ paths: {
+ retry: '/retry',
+ cancel: '/cancel',
+ delete: '/delete',
+ fullProject: '/namespace/my-project',
},
- isLoading: false,
};
- const createComponent = (props = {}) => {
+ const createComponent = (pipelineMock = mockRunningPipelineHeader, { isLoading } = {}) => {
glModalDirective = jest.fn();
- wrapper = shallowMount(HeaderComponent, {
- propsData: {
- ...props,
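+ // Stub the Apollo query state so the component mounts without a live GraphQL client.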
+ const $apollo = {
+ queries: {
+ pipeline: {
+ loading: isLoading,
+ stopPolling: jest.fn(),
+ startPolling: jest.fn(),
+ },
+ },
+ };
+
+ return shallowMount(HeaderComponent, {
+ data() {
+ return {
+ pipeline: pipelineMock,
+ };
+ },
+ provide: {
+ ...defaultProvideOptions,
},
directives: {
glModal: {
- bind(el, { value }) {
+ bind(_, { value }) {
glModalDirective(value);
},
},
},
+ mocks: { $apollo },
});
};
beforeEach(() => {
- jest.spyOn(eventHub, '$emit');
-
- createComponent(defaultProps);
+ mockAxios = new MockAdapter(axios);
+ mockAxios.onGet('*').replyOnce(200);
});
afterEach(() => {
- eventHub.$off();
-
wrapper.destroy();
wrapper = null;
+
+ mockAxios.restore();
});
- it('should render provided pipeline info', () => {
- expect(wrapper.find(CiHeader).props()).toMatchObject({
- status: defaultProps.pipeline.details.status,
- itemId: defaultProps.pipeline.id,
- time: defaultProps.pipeline.created_at,
- user: defaultProps.pipeline.user,
+ describe('initial loading', () => {
+ beforeEach(() => {
+ wrapper = createComponent(null, { isLoading: true });
});
- });
- describe('action buttons', () => {
- it('should not trigger eventHub when nothing happens', () => {
- expect(eventHub.$emit).not.toHaveBeenCalled();
+ it('shows a loading state while graphQL is fetching initial data', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
});
+ });
+
+ describe('visible state', () => {
+ it.each`
+ state | pipelineData | retryValue | cancelValue
+ ${'cancelled'} | ${mockCancelledPipelineHeader} | ${true} | ${false}
+ ${'failed'} | ${mockFailedPipelineHeader} | ${true} | ${false}
+ ${'running'} | ${mockRunningPipelineHeader} | ${false} | ${true}
+ ${'successful'} | ${mockSuccessfulPipelineHeader} | ${false} | ${false}
+ `(
+ 'with a $state pipeline, it will show actions: retry $retryValue and cancel $cancelValue',
+ ({ pipelineData, retryValue, cancelValue }) => {
+ wrapper = createComponent(pipelineData);
+
+ expect(findRetryButton().exists()).toBe(retryValue);
+ expect(findCancelButton().exists()).toBe(cancelValue);
+ },
+ );
+ });
- it('should call postAction when retry button action is clicked', () => {
- wrapper.find('[data-testid="retryButton"]').vm.$emit('click');
+ describe('actions', () => {
+ describe('Retry action', () => {
+ beforeEach(() => {
+ wrapper = createComponent(mockCancelledPipelineHeader);
+ });
- expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
- });
+ it('should call axios with the right path when retry button is clicked', async () => {
+ jest.spyOn(axios, 'post');
+ findRetryButton().vm.$emit('click');
- it('should call postAction when cancel button action is clicked', () => {
- wrapper.find('[data-testid="cancelPipeline"]').vm.$emit('click');
+ await wrapper.vm.$nextTick();
- expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
+ expect(axios.post).toHaveBeenCalledWith(defaultProvideOptions.paths.retry);
+ });
});
- it('does not show delete modal', () => {
- expect(findDeleteModal()).not.toBeVisible();
+ describe('Cancel action', () => {
+ beforeEach(() => {
+ wrapper = createComponent(mockRunningPipelineHeader);
+ });
+
+ it('should call axios with the right path when cancel button is clicked', async () => {
+ jest.spyOn(axios, 'post');
+ findCancelButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(axios.post).toHaveBeenCalledWith(defaultProvideOptions.paths.cancel);
+ });
});
- describe('when delete button action is clicked', () => {
- it('displays delete modal', () => {
+ describe('Delete action', () => {
+ beforeEach(() => {
+ wrapper = createComponent(mockFailedPipelineHeader);
+ });
+
+ it('displays delete modal when clicking on delete and does not call the delete action', async () => {
+ jest.spyOn(axios, 'delete');
+ findDeleteButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
expect(findDeleteModal().props('modalId')).toBe(wrapper.vm.$options.DELETE_MODAL_ID);
expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.$options.DELETE_MODAL_ID);
+ expect(axios.delete).not.toHaveBeenCalled();
});
- it('should call delete when modal is submitted', () => {
+ it('should call delete path when modal is submitted', async () => {
+ jest.spyOn(axios, 'delete');
findDeleteModal().vm.$emit('ok');
- expect(eventHub.$emit).toHaveBeenCalledWith('headerDeleteAction', 'delete');
+ await wrapper.vm.$nextTick();
+
+ expect(axios.delete).toHaveBeenCalledWith(defaultProvideOptions.paths.delete);
});
});
});
diff --git a/spec/frontend/pipelines/legacy_header_component_spec.js b/spec/frontend/pipelines/legacy_header_component_spec.js
new file mode 100644
index 00000000000..fb7feb8898a
--- /dev/null
+++ b/spec/frontend/pipelines/legacy_header_component_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import LegacyHeaderComponent from '~/pipelines/components/legacy_header_component.vue';
+import CiHeader from '~/vue_shared/components/header_ci_component.vue';
+import eventHub from '~/pipelines/event_hub';
+
+describe('Pipeline details header', () => {
+ let wrapper;
+ let glModalDirective;
+
+ const threeWeeksAgo = new Date();
+ threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+
+ const findDeleteModal = () => wrapper.find(GlModal);
+
+ const defaultProps = {
+ pipeline: {
+ details: {
+ status: {
+ group: 'failed',
+ icon: 'status_failed',
+ label: 'failed',
+ text: 'failed',
+ details_path: 'path',
+ },
+ },
+ id: 123,
+ created_at: threeWeeksAgo.toISOString(),
+ user: {
+ web_url: 'path',
+ name: 'Foo',
+ username: 'foobar',
+ email: 'foo@bar.com',
+ avatar_url: 'link',
+ },
+ retry_path: 'retry',
+ cancel_path: 'cancel',
+ delete_path: 'delete',
+ },
+ isLoading: false,
+ };
+
+ const createComponent = (props = {}) => {
+ glModalDirective = jest.fn();
+
+ wrapper = shallowMount(LegacyHeaderComponent, {
+ propsData: {
+ ...props,
+ },
+ directives: {
+ glModal: {
+ bind(el, { value }) {
+ glModalDirective(value);
+ },
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(eventHub, '$emit');
+
+ createComponent(defaultProps);
+ });
+
+ afterEach(() => {
+ eventHub.$off();
+
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('should render provided pipeline info', () => {
+ expect(wrapper.find(CiHeader).props()).toMatchObject({
+ status: defaultProps.pipeline.details.status,
+ itemId: defaultProps.pipeline.id,
+ time: defaultProps.pipeline.created_at,
+ user: defaultProps.pipeline.user,
+ });
+ });
+
+ describe('action buttons', () => {
+ it('should not trigger eventHub when nothing happens', () => {
+ expect(eventHub.$emit).not.toHaveBeenCalled();
+ });
+
+ it('should call postAction when retry button action is clicked', () => {
+ wrapper.find('[data-testid="retryButton"]').vm.$emit('click');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
+ });
+
+ it('should call postAction when cancel button action is clicked', () => {
+ wrapper.find('[data-testid="cancelPipeline"]').vm.$emit('click');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
+ });
+
+ it('does not show delete modal', () => {
+ expect(findDeleteModal()).not.toBeVisible();
+ });
+
+ describe('when delete button action is clicked', () => {
+ it('displays delete modal', () => {
+ expect(findDeleteModal().props('modalId')).toBe(wrapper.vm.$options.DELETE_MODAL_ID);
+ expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.$options.DELETE_MODAL_ID);
+ });
+
+ it('should call delete when modal is submitted', () => {
+ findDeleteModal().vm.$emit('ok');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('headerDeleteAction', 'delete');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/mock_data.js b/spec/frontend/pipelines/mock_data.js
index e63efc543f1..2afdbb05107 100644
--- a/spec/frontend/pipelines/mock_data.js
+++ b/spec/frontend/pipelines/mock_data.js
@@ -1,3 +1,7 @@
+const PIPELINE_RUNNING = 'RUNNING';
+const PIPELINE_CANCELED = 'CANCELED';
+const PIPELINE_FAILED = 'FAILED';
+
export const pipelineWithStages = {
id: 20333396,
user: {
@@ -320,6 +324,80 @@ export const pipelineWithStages = {
triggered: [],
};
+const threeWeeksAgo = new Date();
+threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
+
+export const mockPipelineHeader = {
+ detailedStatus: {},
+ id: 123,
+ userPermissions: {
+ destroyPipeline: true,
+ },
+ createdAt: threeWeeksAgo.toISOString(),
+ user: {
+ name: 'Foo',
+ username: 'foobar',
+ email: 'foo@bar.com',
+ avatarUrl: 'link',
+ },
+};
+
+export const mockFailedPipelineHeader = {
+ ...mockPipelineHeader,
+ status: PIPELINE_FAILED,
+ retryable: true,
+ cancelable: false,
+ detailedStatus: {
+ group: 'failed',
+ icon: 'status_failed',
+ label: 'failed',
+ text: 'failed',
+ detailsPath: 'path',
+ },
+};
+
+export const mockRunningPipelineHeader = {
+ ...mockPipelineHeader,
+ status: PIPELINE_RUNNING,
+ retryable: false,
+ cancelable: true,
+ detailedStatus: {
+ group: 'running',
+ icon: 'status_running',
+ label: 'running',
+ text: 'running',
+ detailsPath: 'path',
+ },
+};
+
+export const mockCancelledPipelineHeader = {
+ ...mockPipelineHeader,
+ status: PIPELINE_CANCELED,
+ retryable: true,
+ cancelable: false,
+ detailedStatus: {
+ group: 'cancelled',
+ icon: 'status_cancelled',
+ label: 'cancelled',
+ text: 'cancelled',
+ detailsPath: 'path',
+ },
+};
+
+export const mockSuccessfulPipelineHeader = {
+ ...mockPipelineHeader,
+ status: 'SUCCESS',
+ retryable: false,
+ cancelable: false,
+ detailedStatus: {
+ group: 'success',
+ icon: 'status_success',
+ label: 'success',
+ text: 'success',
+ detailsPath: 'path',
+ },
+};
+
export const stageReply = {
name: 'deploy',
title: 'deploy: running',
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index b0ad6bbd228..1298a2a1524 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,9 +1,17 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
-import { GlFilteredSearch } from '@gitlab/ui';
+import { GlFilteredSearch, GlButton, GlLoadingIcon } from '@gitlab/ui';
import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
+import NavigationTabs from '~/vue_shared/components/navigation_tabs.vue';
+import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+
+import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
+import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
+import BlankState from '~/pipelines/components/pipelines_list/blank_state.vue';
+import PipelinesTableComponent from '~/pipelines/components/pipelines_list/pipelines_table.vue';
+
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
import Store from '~/pipelines/stores/pipelines_store';
import { pipelineWithStages, stageReply, users, mockSearch, branches } from './mock_data';
@@ -49,6 +57,20 @@ describe('Pipelines', () => {
};
const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
+ const findByTestId = id => wrapper.find(`[data-testid="${id}"]`);
+ const findNavigationTabs = () => wrapper.find(NavigationTabs);
+ const findNavigationControls = () => wrapper.find(NavigationControls);
+ const findTab = tab => findByTestId(`pipelines-tab-${tab}`);
+
+ const findRunPipelineButton = () => findByTestId('run-pipeline-button');
+ const findCiLintButton = () => findByTestId('ci-lint-button');
+ const findCleanCacheButton = () => findByTestId('clear-cache-button');
+
+ const findEmptyState = () => wrapper.find(EmptyState);
+ const findBlankState = () => wrapper.find(BlankState);
+ const findStagesDropdown = () => wrapper.find('.js-builds-dropdown-button');
+
+ const findTablePagination = () => wrapper.find(TablePagination);
const createComponent = (props = defaultProps, methods) => {
wrapper = mount(PipelinesComponent, {
@@ -87,19 +109,19 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('renders Run Pipeline link', () => {
- expect(wrapper.find('.js-run-pipeline').attributes('href')).toBe(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
});
it('renders CI Lint link', () => {
- expect(wrapper.find('.js-ci-lint').attributes('href')).toBe(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
});
it('renders Clear Runner Cache button', () => {
- expect(wrapper.find('.js-clear-cache').text()).toBe('Clear Runner Caches');
+ expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
});
it('renders pipelines table', () => {
@@ -127,23 +149,31 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('renders Run Pipeline link', () => {
- expect(wrapper.find('.js-run-pipeline').attributes('href')).toEqual(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
});
it('renders CI Lint link', () => {
- expect(wrapper.find('.js-ci-lint').attributes('href')).toEqual(paths.ciLintPath);
+ expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
});
it('renders Clear Runner Cache button', () => {
- expect(wrapper.find('.js-clear-cache').text()).toEqual('Clear Runner Caches');
+ expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
});
it('renders tab empty state', () => {
- expect(wrapper.find('.empty-state h4').text()).toEqual('There are currently no pipelines.');
+ expect(findBlankState().text()).toBe('There are currently no pipelines.');
+ });
+
+ it('renders tab empty state for finished scope', () => {
+ wrapper.vm.scope = 'finished';
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findBlankState().text()).toBe('There are currently no finished pipelines.');
+ });
});
});
@@ -165,18 +195,23 @@ describe('Pipelines', () => {
});
it('renders empty state', () => {
- expect(wrapper.find('.js-empty-state h4').text()).toEqual('Build with confidence');
-
- expect(wrapper.find('.js-get-started-pipelines').attributes('href')).toEqual(
- paths.helpPagePath,
- );
+ expect(
+ findEmptyState()
+ .find('h4')
+ .text(),
+ ).toBe('Build with confidence');
+ expect(
+ findEmptyState()
+ .find(GlButton)
+ .attributes('href'),
+ ).toBe(paths.helpPagePath);
});
it('does not render tabs nor buttons', () => {
- expect(wrapper.find('.js-pipelines-tab-all').exists()).toBeFalsy();
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findTab('all').exists()).toBe(false);
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
});
@@ -189,20 +224,18 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('renders buttons', () => {
- expect(wrapper.find('.js-run-pipeline').attributes('href')).toEqual(paths.newPipelinePath);
+ expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
- expect(wrapper.find('.js-ci-lint').attributes('href')).toEqual(paths.ciLintPath);
- expect(wrapper.find('.js-clear-cache').text()).toEqual('Clear Runner Caches');
+ expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
+ expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
});
it('renders error state', () => {
- expect(wrapper.find('.empty-state').text()).toContain(
- 'There was an error fetching the pipelines.',
- );
+ expect(findBlankState().text()).toContain('There was an error fetching the pipelines.');
});
});
});
@@ -218,13 +251,13 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('does not render buttons', () => {
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
it('renders pipelines table', () => {
@@ -252,17 +285,17 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('does not render buttons', () => {
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
it('renders tab empty state', () => {
- expect(wrapper.find('.empty-state h4').text()).toEqual('There are currently no pipelines.');
+ expect(wrapper.find('.empty-state h4').text()).toBe('There are currently no pipelines.');
});
});
@@ -284,18 +317,22 @@ describe('Pipelines', () => {
});
it('renders empty state without button to set CI', () => {
- expect(wrapper.find('.js-empty-state').text()).toEqual(
+ expect(findEmptyState().text()).toBe(
'This project is not currently set up to run pipelines.',
);
- expect(wrapper.find('.js-get-started-pipelines').exists()).toBeFalsy();
+ expect(
+ findEmptyState()
+ .find(GlButton)
+ .exists(),
+ ).toBeFalsy();
});
it('does not render tabs or buttons', () => {
- expect(wrapper.find('.js-pipelines-tab-all').exists()).toBeFalsy();
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findTab('all').exists()).toBe(false);
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
});
@@ -309,13 +346,13 @@ describe('Pipelines', () => {
});
it('renders tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
+ expect(findTab('all').text()).toContain('All');
});
it('does not renders buttons', () => {
- expect(wrapper.find('.js-run-pipeline').exists()).toBeFalsy();
- expect(wrapper.find('.js-ci-lint').exists()).toBeFalsy();
- expect(wrapper.find('.js-clear-cache').exists()).toBeFalsy();
+ expect(findRunPipelineButton().exists()).toBeFalsy();
+ expect(findCiLintButton().exists()).toBeFalsy();
+ expect(findCleanCacheButton().exists()).toBeFalsy();
});
it('renders error state', () => {
@@ -342,14 +379,20 @@ describe('Pipelines', () => {
);
});
- it('should render navigation tabs', () => {
- expect(wrapper.find('.js-pipelines-tab-all').text()).toContain('All');
-
- expect(wrapper.find('.js-pipelines-tab-finished').text()).toContain('Finished');
-
- expect(wrapper.find('.js-pipelines-tab-branches').text()).toContain('Branches');
+ it('should set up navigation tabs', () => {
+ expect(findNavigationTabs().props('tabs')).toEqual([
+ { name: 'All', scope: 'all', count: '3', isActive: true },
+ { name: 'Finished', scope: 'finished', count: undefined, isActive: false },
+ { name: 'Branches', scope: 'branches', isActive: false },
+ { name: 'Tags', scope: 'tags', isActive: false },
+ ]);
+ });
- expect(wrapper.find('.js-pipelines-tab-tags').text()).toContain('Tags');
+ it('should render navigation tabs', () => {
+ expect(findTab('all').html()).toContain('All');
+ expect(findTab('finished').text()).toContain('Finished');
+ expect(findTab('branches').text()).toContain('Branches');
+ expect(findTab('tags').text()).toContain('Tags');
});
it('should make an API request when using tabs', () => {
@@ -362,7 +405,7 @@ describe('Pipelines', () => {
);
return waitForPromises().then(() => {
- wrapper.find('.js-pipelines-tab-finished').trigger('click');
+ findTab('finished').trigger('click');
expect(updateContentMock).toHaveBeenCalledWith({ scope: 'finished', page: '1' });
});
@@ -401,133 +444,172 @@ describe('Pipelines', () => {
});
});
- describe('methods', () => {
+ describe('User Interaction', () => {
+ let updateContentMock;
+
beforeEach(() => {
jest.spyOn(window.history, 'pushState').mockImplementation(() => null);
});
- describe('onChangeTab', () => {
- it('should set page to 1', () => {
- const updateContentMock = jest.fn(() => {});
- createComponent(
- { hasGitlabCi: true, canCreatePipeline: true, ...paths },
- {
- updateContent: updateContentMock,
- },
- );
+ beforeEach(() => {
+ mock.onGet(paths.endpoint).reply(200, pipelines);
+ createComponent();
- wrapper.vm.onChangeTab('running');
+ updateContentMock = jest.spyOn(wrapper.vm, 'updateContent');
+
+ return waitForPromises();
+ });
+
+ describe('when user changes tabs', () => {
+ it('should set page to 1', () => {
+ findNavigationTabs().vm.$emit('onChangeTab', 'running');
expect(updateContentMock).toHaveBeenCalledWith({ scope: 'running', page: '1' });
});
});
- describe('onChangePage', () => {
+ describe('when user changes page', () => {
it('should update page and keep scope', () => {
- const updateContentMock = jest.fn(() => {});
- createComponent(
- { hasGitlabCi: true, canCreatePipeline: true, ...paths },
- {
- updateContent: updateContentMock,
- },
- );
-
- wrapper.vm.onChangePage(4);
+ findTablePagination().vm.change(4);
expect(updateContentMock).toHaveBeenCalledWith({ scope: wrapper.vm.scope, page: '4' });
});
});
- });
- describe('computed properties', () => {
- beforeEach(() => {
- createComponent();
- });
+ describe('updates results when a stage is clicked', () => {
+ beforeEach(() => {
+ const copyPipeline = { ...pipelineWithStages };
+ copyPipeline.id += 1;
+ mock
+ .onGet('twitter/flight/pipelines.json')
+ .reply(
+ 200,
+ {
+ pipelines: [pipelineWithStages],
+ count: {
+ all: 1,
+ finished: 1,
+ pending: 0,
+ running: 0,
+ },
+ },
+ {
+ 'POLL-INTERVAL': 100,
+ },
+ )
+ .onGet(pipelineWithStages.details.stages[0].dropdown_path)
+ .reply(200, stageReply);
- describe('tabs', () => {
- it('returns default tabs', () => {
- expect(wrapper.vm.tabs).toEqual([
- { name: 'All', scope: 'all', count: undefined, isActive: true },
- { name: 'Finished', scope: 'finished', count: undefined, isActive: false },
- { name: 'Branches', scope: 'branches', isActive: false },
- { name: 'Tags', scope: 'tags', isActive: false },
- ]);
+ createComponent();
});
- });
- describe('emptyTabMessage', () => {
- it('returns message with finished scope', () => {
- wrapper.vm.scope = 'finished';
+ describe('when a request is being made', () => {
+ it('stops polling, cancels the request, & restarts polling', () => {
+ const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
+ const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
+ const cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
+ mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.emptyTabMessage).toEqual('There are currently no finished pipelines.');
+ return waitForPromises()
+ .then(() => {
+ wrapper.vm.isMakingRequest = true;
+ findStagesDropdown().trigger('click');
+ })
+ .then(() => {
+ expect(cancelMock).toHaveBeenCalled();
+ expect(stopMock).toHaveBeenCalled();
+ expect(restartMock).toHaveBeenCalled();
+ });
});
});
- it('returns message without scope when scope is `all`', () => {
- expect(wrapper.vm.emptyTabMessage).toEqual('There are currently no pipelines.');
+ describe('when no request is being made', () => {
+ it('stops polling & restarts polling', () => {
+ const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
+ const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
+ mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
+
+ return waitForPromises()
+ .then(() => {
+ findStagesDropdown().trigger('click');
+ expect(stopMock).toHaveBeenCalled();
+ })
+ .then(() => {
+ expect(restartMock).toHaveBeenCalled();
+ });
+ });
});
});
+ });
- describe('stateToRender', () => {
- it('returns loading state when the app is loading', () => {
- expect(wrapper.vm.stateToRender).toEqual('loading');
+ describe('Rendered content', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('displays different content', () => {
+ it('shows loading state when the app is loading', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
- it('returns error state when app has error', () => {
+ it('shows error state when app has error', () => {
wrapper.vm.hasError = true;
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('error');
+ expect(findBlankState().props('message')).toBe(
+ 'There was an error fetching the pipelines. Try again in a few moments or contact your support team.',
+ );
});
});
- it('returns table list when app has pipelines', () => {
+ it('shows table list when app has pipelines', () => {
wrapper.vm.isLoading = false;
wrapper.vm.hasError = false;
wrapper.vm.state.pipelines = pipelines.pipelines;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('tableList');
+ expect(wrapper.find(PipelinesTableComponent).exists()).toBe(true);
});
});
- it('returns empty tab when app does not have pipelines but project has pipelines', () => {
+ it('shows empty tab when app does not have pipelines but project has pipelines', () => {
wrapper.vm.state.count.all = 10;
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('emptyTab');
+ expect(findBlankState().exists()).toBe(true);
+ expect(findBlankState().props('message')).toBe('There are currently no pipelines.');
});
});
- it('returns empty tab when project has CI', () => {
+ it('shows empty tab when project has CI', () => {
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('emptyTab');
+ expect(findBlankState().exists()).toBe(true);
+ expect(findBlankState().props('message')).toBe('There are currently no pipelines.');
});
});
- it('returns empty state when project does not have pipelines nor CI', () => {
+ it('shows empty state when project does not have pipelines nor CI', () => {
createComponent({ hasGitlabCi: false, canCreatePipeline: true, ...paths });
wrapper.vm.isLoading = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.stateToRender).toEqual('emptyState');
+ expect(wrapper.find(EmptyState).exists()).toBe(true);
});
});
});
- describe('shouldRenderTabs', () => {
+ describe('displays tabs', () => {
it('returns true when state is loading & has already made the first request', () => {
wrapper.vm.isLoading = true;
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -537,7 +619,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -547,7 +629,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -557,7 +639,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(true);
+ expect(findNavigationTabs().exists()).toBe(true);
});
});
@@ -565,7 +647,7 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(false);
+ expect(findNavigationTabs().exists()).toBe(false);
});
});
@@ -576,17 +658,17 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderTabs).toEqual(false);
+ expect(findNavigationTabs().exists()).toBe(false);
});
});
});
- describe('shouldRenderButtons', () => {
+ describe('displays buttons', () => {
it('returns true when it has paths & has made the first request', () => {
wrapper.vm.hasMadeRequest = true;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderButtons).toEqual(true);
+ expect(findNavigationControls().exists()).toBe(true);
});
});
@@ -594,77 +676,12 @@ describe('Pipelines', () => {
wrapper.vm.hasMadeRequest = false;
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.shouldRenderButtons).toEqual(false);
+ expect(findNavigationControls().exists()).toBe(false);
});
});
});
});
- describe('updates results when a staged is clicked', () => {
- beforeEach(() => {
- const copyPipeline = { ...pipelineWithStages };
- copyPipeline.id += 1;
- mock
- .onGet('twitter/flight/pipelines.json')
- .reply(
- 200,
- {
- pipelines: [pipelineWithStages],
- count: {
- all: 1,
- finished: 1,
- pending: 0,
- running: 0,
- },
- },
- {
- 'POLL-INTERVAL': 100,
- },
- )
- .onGet(pipelineWithStages.details.stages[0].dropdown_path)
- .reply(200, stageReply);
-
- createComponent();
- });
-
- describe('when a request is being made', () => {
- it('stops polling, cancels the request, & restarts polling', () => {
- const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
- const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
- const cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
- mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
-
- return waitForPromises()
- .then(() => {
- wrapper.vm.isMakingRequest = true;
- wrapper.find('.js-builds-dropdown-button').trigger('click');
- })
- .then(() => {
- expect(cancelMock).toHaveBeenCalled();
- expect(stopMock).toHaveBeenCalled();
- expect(restartMock).toHaveBeenCalled();
- });
- });
- });
-
- describe('when no request is being made', () => {
- it('stops polling & restarts polling', () => {
- const stopMock = jest.spyOn(wrapper.vm.poll, 'stop');
- const restartMock = jest.spyOn(wrapper.vm.poll, 'restart');
- mock.onGet('twitter/flight/pipelines.json').reply(200, pipelines);
-
- return waitForPromises()
- .then(() => {
- wrapper.find('.js-builds-dropdown-button').trigger('click');
- expect(stopMock).toHaveBeenCalled();
- })
- .then(() => {
- expect(restartMock).toHaveBeenCalled();
- });
- });
- });
- });
-
describe('Pipeline filters', () => {
let updateContentMock;
diff --git a/spec/frontend/pipelines/test_reports/mock_data.js b/spec/frontend/pipelines/test_reports/mock_data.js
index 1d03f0b655f..872cb5c87be 100644
--- a/spec/frontend/pipelines/test_reports/mock_data.js
+++ b/spec/frontend/pipelines/test_reports/mock_data.js
@@ -9,4 +9,20 @@ export default [
status: TestStatus.SKIPPED,
system_output: null,
},
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0,
+ name: 'Test#error text',
+ stack_trace: null,
+ status: TestStatus.ERROR,
+ system_output: null,
+ },
+ {
+ classname: 'spec.test_spec',
+ execution_time: 0,
+ name: 'Test#unknown text',
+ stack_trace: null,
+ status: TestStatus.UNKNOWN,
+ system_output: null,
+ },
];
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index 2feb6aa5799..af2150be7a0 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -61,18 +61,17 @@ describe('Test reports suite table', () => {
expect(allCaseRows().length).toBe(testCases.length);
});
- it('renders the correct icon for each status', () => {
- const failedTest = testCases.findIndex(x => x.status === TestStatus.FAILED);
- const skippedTest = testCases.findIndex(x => x.status === TestStatus.SKIPPED);
- const successTest = testCases.findIndex(x => x.status === TestStatus.SUCCESS);
+ it.each([
+ TestStatus.ERROR,
+ TestStatus.FAILED,
+ TestStatus.SKIPPED,
+ TestStatus.SUCCESS,
+ 'unknown',
+ ])('renders the correct icon for test case with %s status', status => {
+ const test = testCases.findIndex(x => x.status === status);
+ const row = findCaseRowAtIndex(test);
- const failedRow = findCaseRowAtIndex(failedTest);
- const skippedRow = findCaseRowAtIndex(skippedTest);
- const successRow = findCaseRowAtIndex(successTest);
-
- expect(findIconForRow(failedRow, TestStatus.FAILED).exists()).toBe(true);
- expect(findIconForRow(skippedRow, TestStatus.SKIPPED).exists()).toBe(true);
- expect(findIconForRow(successRow, TestStatus.SUCCESS).exists()).toBe(true);
+ expect(findIconForRow(row, status).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/project_find_file_spec.js b/spec/frontend/project_find_file_spec.js
index 757a02a04a3..6a50f68a4e9 100644
--- a/spec/frontend/project_find_file_spec.js
+++ b/spec/frontend/project_find_file_spec.js
@@ -1,11 +1,12 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { TEST_HOST } from 'helpers/test_constants';
-import { sanitize } from 'dompurify';
+import { sanitize } from '~/lib/dompurify';
import ProjectFindFile from '~/project_find_file';
import axios from '~/lib/utils/axios_utils';
-jest.mock('dompurify', () => ({
+jest.mock('~/lib/dompurify', () => ({
+ addHook: jest.fn(),
sanitize: jest.fn(val => val),
}));
diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js
new file mode 100644
index 00000000000..ebd4ee45dab
--- /dev/null
+++ b/spec/frontend/projects/commit_box/info/load_branches_spec.js
@@ -0,0 +1,68 @@
+import axios from 'axios';
+import waitForPromises from 'helpers/wait_for_promises';
+import MockAdapter from 'axios-mock-adapter';
+import { loadBranches } from '~/projects/commit_box/info/load_branches';
+
+const mockCommitPath = '/commit/abcd/branches';
+const mockBranchesRes =
+ '<a href="/-/commits/master">master</a><span><a href="/-/commits/my-branch">my-branch</a></span>';
+
+describe('~/projects/commit_box/info/load_branches', () => {
+ let mock;
+ let el;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockCommitPath).reply(200, mockBranchesRes);
+
+ el = document.createElement('div');
+ el.dataset.commitPath = mockCommitPath;
+ el.innerHTML = '<div class="commit-info branches"><span class="spinner"/></div>';
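+ // Fixture: the spinner placeholder is replaced with the rendered branch links once loaded.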
+ });
+
+ it('loads and renders branches info', async () => {
+ loadBranches(el);
+ await waitForPromises();
+
+ expect(el.innerHTML).toBe(`<div class="commit-info branches">${mockBranchesRes}</div>`);
+ });
+
+ it('does not load when no container is provided', async () => {
+ loadBranches(null);
+ await waitForPromises();
+
+ expect(mock.history.get).toHaveLength(0);
+ });
+
+ describe('when branches request returns unsafe content', () => {
+ beforeEach(() => {
+ mock
+ .onGet(mockCommitPath)
+ .reply(200, '<a onload="alert(\'xss!\');" href="/-/commits/master">master</a>');
+ });
+
+ it('displays sanitized html', async () => {
+ loadBranches(el);
+ await waitForPromises();
+
+ expect(el.innerHTML).toBe(
+ '<div class="commit-info branches"><a href="/-/commits/master">master</a></div>',
+ );
+ });
+ });
+
+ describe('when branches request fails', () => {
+ beforeEach(() => {
+ mock.onGet(mockCommitPath).reply(500, 'Error!');
+ });
+
+ it('attempts to load and renders an error', async () => {
+ loadBranches(el);
+ await waitForPromises();
+
+ expect(el.innerHTML).toBe(
+ '<div class="commit-info branches">Failed to load branches. Please try again.</div>',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
index 455467e7b29..a0fd6012546 100644
--- a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -17,6 +17,7 @@ exports[`Project remove modal initialized matches the snapshot 1`] = `
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
role="button"
diff --git a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
index 692b8f6cf52..4630415f61c 100644
--- a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
+++ b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
@@ -18,6 +18,7 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
/>
<gl-button-stub
+ buttontextclasses=""
category="primary"
icon=""
role="button"
@@ -84,6 +85,7 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
<template>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-modal-action-cancel"
icon=""
@@ -98,6 +100,7 @@ exports[`Project remove modal intialized matches the snapshot 1`] = `
<!---->
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="js-modal-action-primary"
disabled="true"
diff --git a/spec/frontend/projects/settings/access_dropdown_spec.js b/spec/frontend/projects/settings/access_dropdown_spec.js
index 3b375c5610f..41b9c0c3763 100644
--- a/spec/frontend/projects/settings/access_dropdown_spec.js
+++ b/spec/frontend/projects/settings/access_dropdown_spec.js
@@ -14,6 +14,7 @@ describe('AccessDropdown', () => {
`);
const $dropdown = $('#dummy-dropdown');
$dropdown.data('defaultLabel', defaultLabel);
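+ // Enable the deployKeysOnProtectedBranches flag so deploy key items are reflected in the dropdown label.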
+ gon.features = { deployKeysOnProtectedBranches: true };
const options = {
$dropdown,
accessLevelsData: {
@@ -37,6 +38,9 @@ describe('AccessDropdown', () => {
{ type: LEVEL_TYPES.GROUP },
{ type: LEVEL_TYPES.GROUP },
{ type: LEVEL_TYPES.GROUP },
+ { type: LEVEL_TYPES.DEPLOY_KEY },
+ { type: LEVEL_TYPES.DEPLOY_KEY },
+ { type: LEVEL_TYPES.DEPLOY_KEY },
];
beforeEach(() => {
@@ -49,7 +53,7 @@ describe('AccessDropdown', () => {
const label = dropdown.toggleLabel();
- expect(label).toBe('1 role, 2 users, 3 groups');
+ expect(label).toBe('1 role, 2 users, 3 deploy keys, 3 groups');
expect($dropdownToggleText).not.toHaveClass('is-default');
});
@@ -122,6 +126,21 @@ describe('AccessDropdown', () => {
expect($dropdownToggleText).not.toHaveClass('is-default');
});
});
+
+ describe('with users and deploy keys', () => {
+ beforeEach(() => {
+ const selectedTypes = [LEVEL_TYPES.DEPLOY_KEY, LEVEL_TYPES.USER];
+ dropdown.setSelectedItems(dummyItems.filter(item => selectedTypes.includes(item.type)));
+ $dropdownToggleText.addClass('is-default');
+ });
+
+ it('displays number of deploy keys', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('2 users, 3 deploy keys');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+ });
});
describe('userRowHtml', () => {
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index 0f3b699f6b2..62aeb4ddee5 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -218,9 +218,7 @@ describe('ServiceDeskRoot', () => {
.$nextTick()
.then(waitForPromises)
.then(() => {
- expect(wrapper.html()).toContain(
- 'An error occurred while saving the template. Please check if the template exists.',
- );
+ expect(wrapper.html()).toContain('An error occured while making the changes:');
});
});
});
diff --git a/spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js b/spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js
new file mode 100644
index 00000000000..17821d8be31
--- /dev/null
+++ b/spec/frontend/registry/explorer/components/details_page/partial_cleanup_alert_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlSprintf } from '@gitlab/ui';
+import component from '~/registry/explorer/components/details_page/partial_cleanup_alert.vue';
+import { DELETE_ALERT_TITLE, DELETE_ALERT_LINK_TEXT } from '~/registry/explorer/constants';
+
+describe('Partial Cleanup alert', () => {
+ let wrapper;
+
+ const findAlert = () => wrapper.find(GlAlert);
+ const findRunLink = () => wrapper.find('[data-testid="run-link"]');
+ const findHelpLink = () => wrapper.find('[data-testid="help-link"]');
+
+ const mountComponent = () => {
+ wrapper = shallowMount(component, {
+ stubs: { GlSprintf },
+ propsData: {
+ runCleanupPoliciesHelpPagePath: 'foo',
+ cleanupPoliciesHelpPagePath: 'bar',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it(`gl-alert has the correct properties`, () => {
+ mountComponent();
+
+ expect(findAlert().props()).toMatchObject({
+ title: DELETE_ALERT_TITLE,
+ variant: 'warning',
+ });
+ });
+
+ it('has the right text', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toMatchInterpolatedText(DELETE_ALERT_LINK_TEXT);
+ });
+
+ it('contains run link', () => {
+ mountComponent();
+
+ const link = findRunLink();
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toMatchObject({
+ href: 'foo',
+ target: '_blank',
+ });
+ });
+
+ it('contains help link', () => {
+ mountComponent();
+
+ const link = findHelpLink();
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toMatchObject({
+ href: 'bar',
+ target: '_blank',
+ });
+ });
+
+ it('GlAlert dismiss event triggers a dismiss event', () => {
+ mountComponent();
+
+ findAlert().vm.$emit('dismiss');
+ expect(wrapper.emitted('dismiss')).toEqual([[]]);
+ });
+});
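For readers unfamiliar with the `emitted()` assertion at the end of this new spec: vue-test-utils records one entry per emission, each entry being the array of arguments, so a single payload-less `dismiss` is asserted as `[[]]`. A minimal, self-contained sketch of that contract (the `Child`/`Parent` components below are invented for illustration and are not part of the diff):

```js
import { shallowMount } from '@vue/test-utils';

// Invented components: the parent forwards the child's dismiss event, which is the
// behaviour the spec above expects from partial_cleanup_alert.vue with GlAlert.
const Child = { name: 'child-component', render: h => h('div') };
const Parent = {
  components: { Child },
  template: `<child @dismiss="$emit('dismiss')" />`,
};

describe('emitted() contract', () => {
  it('records a payload-less emission as an empty arguments array', () => {
    const wrapper = shallowMount(Parent);

    // Same pattern as findAlert().vm.$emit('dismiss') in the spec above.
    wrapper.find(Child).vm.$emit('dismiss');

    expect(wrapper.emitted('dismiss')).toEqual([[]]);
  });
});
```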
diff --git a/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js b/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
index 7a27f8fa431..3c997093d46 100644
--- a/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/registry_header_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlSprintf, GlLink } from '@gitlab/ui';
+import { GlSprintf } from '@gitlab/ui';
import Component from '~/registry/explorer/components/list_page/registry_header.vue';
import TitleArea from '~/vue_shared/components/registry/title_area.vue';
import {
@@ -19,12 +19,8 @@ describe('registry_header', () => {
const findTitleArea = () => wrapper.find(TitleArea);
const findCommandsSlot = () => wrapper.find('[data-testid="commands-slot"]');
- const findInfoArea = () => wrapper.find('[data-testid="info-area"]');
- const findIntroText = () => wrapper.find('[data-testid="default-intro"]');
const findImagesCountSubHeader = () => wrapper.find('[data-testid="images-count"]');
const findExpirationPolicySubHeader = () => wrapper.find('[data-testid="expiration-policy"]');
- const findDisabledExpirationPolicyMessage = () =>
- wrapper.find('[data-testid="expiration-disabled-message"]');
const mountComponent = (propsData, slots) => {
wrapper = shallowMount(Component, {
@@ -123,44 +119,18 @@ describe('registry_header', () => {
});
});
- describe('info area', () => {
- it('exists', () => {
- mountComponent();
-
- expect(findInfoArea().exists()).toBe(true);
- });
-
+ describe('info messages', () => {
describe('default message', () => {
- beforeEach(() => {
- return mountComponent({ helpPagePath: 'bar' });
- });
-
- it('exists', () => {
- expect(findIntroText().exists()).toBe(true);
- });
-
- it('has the correct copy', () => {
- expect(findIntroText().text()).toMatchInterpolatedText(LIST_INTRO_TEXT);
- });
+ it('is correctly bound to title_area props', () => {
+ mountComponent({ helpPagePath: 'foo' });
- it('has the correct link', () => {
- expect(
- findIntroText()
- .find(GlLink)
- .attributes('href'),
- ).toBe('bar');
+ expect(findTitleArea().props('infoMessages')).toEqual([
+ { text: LIST_INTRO_TEXT, link: 'foo' },
+ ]);
});
});
describe('expiration policy info message', () => {
- describe('when there are no images', () => {
- it('is hidden', () => {
- mountComponent();
-
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(false);
- });
- });
-
describe('when there are images', () => {
describe('when expiration policy is disabled', () => {
beforeEach(() => {
@@ -170,43 +140,27 @@ describe('registry_header', () => {
imagesCount: 1,
});
});
- it('message exist', () => {
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(true);
- });
- it('has the correct copy', () => {
- expect(findDisabledExpirationPolicyMessage().text()).toMatchInterpolatedText(
- EXPIRATION_POLICY_DISABLED_MESSAGE,
- );
- });
- it('has the correct link', () => {
- expect(
- findDisabledExpirationPolicyMessage()
- .find(GlLink)
- .attributes('href'),
- ).toBe('foo');
+ it('is correctly bound to title_area props', () => {
+ expect(findTitleArea().props('infoMessages')).toEqual([
+ { text: LIST_INTRO_TEXT, link: '' },
+ { text: EXPIRATION_POLICY_DISABLED_MESSAGE, link: 'foo' },
+ ]);
});
});
- describe('when expiration policy is enabled', () => {
+ describe.each`
+ desc | props
+ ${'when there are no images'} | ${{ expirationPolicy: { enabled: false }, imagesCount: 0 }}
+ ${'when expiration policy is enabled'} | ${{ expirationPolicy: { enabled: true }, imagesCount: 1 }}
+ ${'when the expiration policy is completely disabled'} | ${{ expirationPolicy: { enabled: false }, imagesCount: 1, hideExpirationPolicyData: true }}
+ `('$desc', ({ props }) => {
it('message does not exist', () => {
- mountComponent({
- expirationPolicy: { enabled: true },
- imagesCount: 1,
- });
-
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(false);
- });
- });
- describe('when the expiration policy is completely disabled', () => {
- it('message does not exist', () => {
- mountComponent({
- expirationPolicy: { enabled: true },
- imagesCount: 1,
- hideExpirationPolicyData: true,
- });
+ mountComponent(props);
- expect(findDisabledExpirationPolicyMessage().exists()).toBe(false);
+ expect(findTitleArea().props('infoMessages')).toEqual([
+ { text: LIST_INTRO_TEXT, link: '' },
+ ]);
});
});
});
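The hunk above folds three near-identical `describe` blocks into Jest's tagged-template form of `describe.each`. As a stand-alone sketch of how that form works: the first template row is the header, every following row becomes one `describe`, and the header names are the keys destructured in the callback (the rows below are illustrative, not taken from the spec):

```js
describe.each`
  desc                          | imagesCount | messageExpected
  ${'when there are no images'} | ${0}        | ${false}
  ${'when there are images'}    | ${1}        | ${true}
`('$desc', ({ imagesCount, messageExpected }) => {
  it('derives the expected flag from the table row', () => {
    // In the real spec each row instead feeds props into mountComponent().
    expect(imagesCount > 0).toBe(messageExpected);
  });
});
```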
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 66e8a4aea0d..86b52c4f06a 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -3,6 +3,7 @@ import { GlPagination } from '@gitlab/ui';
import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/details.vue';
import DeleteAlert from '~/registry/explorer/components/details_page/delete_alert.vue';
+import PartialCleanupAlert from '~/registry/explorer/components/details_page/partial_cleanup_alert.vue';
import DetailsHeader from '~/registry/explorer/components/details_page/details_header.vue';
import TagsLoader from '~/registry/explorer/components/details_page/tags_loader.vue';
import TagsList from '~/registry/explorer/components/details_page/tags_list.vue';
@@ -30,8 +31,10 @@ describe('Details Page', () => {
const findDeleteAlert = () => wrapper.find(DeleteAlert);
const findDetailsHeader = () => wrapper.find(DetailsHeader);
const findEmptyTagsState = () => wrapper.find(EmptyTagsState);
+ const findPartialCleanupAlert = () => wrapper.find(PartialCleanupAlert);
- const routeId = window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar' }));
+ const routeIdGenerator = override =>
+ window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar', ...override }));
const tagsArrayToSelectedTags = tags =>
tags.reduce((acc, c) => {
@@ -39,7 +42,7 @@ describe('Details Page', () => {
return acc;
}, {});
- const mountComponent = options => {
+ const mountComponent = ({ options, routeParams } = {}) => {
wrapper = shallowMount(component, {
store,
stubs: {
@@ -48,7 +51,7 @@ describe('Details Page', () => {
mocks: {
$route: {
params: {
- id: routeId,
+ id: routeIdGenerator(routeParams),
},
},
},
@@ -224,7 +227,7 @@ describe('Details Page', () => {
findDeleteModal().vm.$emit('confirmDelete');
expect(dispatchSpy).toHaveBeenCalledWith('requestDeleteTag', {
tag: store.state.tags[0],
- params: routeId,
+ params: routeIdGenerator(),
});
});
});
@@ -239,7 +242,7 @@ describe('Details Page', () => {
findDeleteModal().vm.$emit('confirmDelete');
expect(dispatchSpy).toHaveBeenCalledWith('requestDeleteTags', {
ids: store.state.tags.map(t => t.name),
- params: routeId,
+ params: routeIdGenerator(),
});
});
});
@@ -273,11 +276,57 @@ describe('Details Page', () => {
it('has the correct props', () => {
store.commit(SET_INITIAL_STATE, { ...config });
mountComponent({
- data: () => ({
- deleteAlertType,
- }),
+ options: {
+ data: () => ({
+ deleteAlertType,
+ }),
+ },
});
expect(findDeleteAlert().props()).toEqual({ ...config, deleteAlertType });
});
});
+
+ describe('Partial Cleanup Alert', () => {
+ const config = {
+ runCleanupPoliciesHelpPagePath: 'foo',
+ cleanupPoliciesHelpPagePath: 'bar',
+ };
+
+ describe('when cleanup_policy_started_at is not null', () => {
+ const routeParams = { cleanup_policy_started_at: Date.now().toString() };
+
+ it('exists', () => {
+ mountComponent({ routeParams });
+
+ expect(findPartialCleanupAlert().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ store.commit(SET_INITIAL_STATE, { ...config });
+
+ mountComponent({ routeParams });
+
+ expect(findPartialCleanupAlert().props()).toEqual({ ...config });
+ });
+
+ it('dismiss hides the component', async () => {
+ mountComponent({ routeParams });
+
+ expect(findPartialCleanupAlert().exists()).toBe(true);
+ findPartialCleanupAlert().vm.$emit('dismiss');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findPartialCleanupAlert().exists()).toBe(false);
+ });
+ });
+
+ describe('when cleanup_policy_started_at is null', () => {
+ it('the component is hidden', () => {
+ mountComponent();
+
+ expect(findPartialCleanupAlert().exists()).toBe(false);
+ });
+ });
+ });
});
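The new `routeIdGenerator` above encodes route params as base64 JSON. Below is a minimal sketch of the round trip; the decode side is an assumption about how the details page reads `$route.params.id`, since only the encode side appears verbatim in the spec:

```js
const encodeRouteId = override =>
  window.btoa(JSON.stringify({ name: 'foo', tags_path: 'bar', ...override }));

// Assumed inverse used by the page under test.
const decodeRouteId = id => JSON.parse(window.atob(id));

const id = encodeRouteId({ cleanup_policy_started_at: Date.now().toString() });

// A non-null cleanup_policy_started_at in the decoded params is what the spec above
// relies on to make the partial cleanup alert render.
decodeRouteId(id).cleanup_policy_started_at; // e.g. '1605000000000'
```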
diff --git a/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap b/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
deleted file mode 100644
index 11393c89d06..00000000000
--- a/spec/frontend/registry/settings/components/__snapshots__/registry_settings_app_spec.js.snap
+++ /dev/null
@@ -1,7 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Registry Settings App renders 1`] = `
-<div>
- <settings-form-stub />
-</div>
-`;
diff --git a/spec/frontend/registry/settings/components/registry_settings_app_spec.js b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
index 9551ee72e51..01d6852e1e5 100644
--- a/spec/frontend/registry/settings/components/registry_settings_app_spec.js
+++ b/spec/frontend/registry/settings/components/registry_settings_app_spec.js
@@ -1,28 +1,35 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
+import createMockApollo from 'jest/helpers/mock_apollo_helper';
import component from '~/registry/settings/components/registry_settings_app.vue';
+import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
import SettingsForm from '~/registry/settings/components/settings_form.vue';
-import { createStore } from '~/registry/settings/store/';
-import { SET_SETTINGS, SET_INITIAL_STATE } from '~/registry/settings/store/mutation_types';
import { FETCH_SETTINGS_ERROR_MESSAGE } from '~/registry/shared/constants';
import {
UNAVAILABLE_FEATURE_INTRO_TEXT,
UNAVAILABLE_USER_FEATURE_TEXT,
} from '~/registry/settings/constants';
-import { stringifiedFormOptions } from '../../shared/mock_data';
+import { expirationPolicyPayload } from '../mock_data';
+
+const localVue = createLocalVue();
describe('Registry Settings App', () => {
let wrapper;
- let store;
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ isAdmin: false,
+ adminSettingsPath: 'settingsPath',
+ enableHistoricEntries: false,
+ };
const findSettingsComponent = () => wrapper.find(SettingsForm);
const findAlert = () => wrapper.find(GlAlert);
- const mountComponent = ({ dispatchMock = 'mockResolvedValue' } = {}) => {
- const dispatchSpy = jest.spyOn(store, 'dispatch');
- dispatchSpy[dispatchMock]();
-
+ const mountComponent = (provide = defaultProvidedValues, config) => {
wrapper = shallowMount(component, {
stubs: {
GlSprintf,
@@ -32,71 +39,72 @@ describe('Registry Settings App', () => {
show: jest.fn(),
},
},
- store,
+ provide,
+ ...config,
});
};
- beforeEach(() => {
- store = createStore();
- });
+ const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [[expirationPolicyQuery, resolver]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+ mountComponent(provide, {
+ localVue,
+ apolloProvider: fakeApollo,
+ });
+
+ return requestHandlers.map(request => request[1]);
+ };
afterEach(() => {
wrapper.destroy();
});
- it('renders', () => {
- store.commit(SET_SETTINGS, { foo: 'bar' });
- mountComponent();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('call the store function to load the data on mount', () => {
- mountComponent();
- expect(store.dispatch).toHaveBeenCalledWith('fetchSettings');
- });
+ it('renders the setting form', async () => {
+ const requests = mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyPayload()),
+ });
+ await Promise.all(requests);
- it('renders the setting form', () => {
- store.commit(SET_SETTINGS, { foo: 'bar' });
- mountComponent();
expect(findSettingsComponent().exists()).toBe(true);
});
describe('the form is disabled', () => {
- beforeEach(() => {
- store.commit(SET_SETTINGS, undefined);
+ it('the form is hidden', () => {
mountComponent();
- });
- it('the form is hidden', () => {
expect(findSettingsComponent().exists()).toBe(false);
});
it('shows an alert', () => {
+ mountComponent();
+
const text = findAlert().text();
expect(text).toContain(UNAVAILABLE_FEATURE_INTRO_TEXT);
expect(text).toContain(UNAVAILABLE_USER_FEATURE_TEXT);
});
describe('an admin is visiting the page', () => {
- beforeEach(() => {
- store.commit(SET_INITIAL_STATE, {
- ...stringifiedFormOptions,
- isAdmin: true,
- adminSettingsPath: 'foo',
- });
- });
-
it('shows the admin part of the alert message', () => {
+ mountComponent({ ...defaultProvidedValues, isAdmin: true });
+
const sprintf = findAlert().find(GlSprintf);
expect(sprintf.text()).toBe('administration settings');
- expect(sprintf.find(GlLink).attributes('href')).toBe('foo');
+ expect(sprintf.find(GlLink).attributes('href')).toBe(
+ defaultProvidedValues.adminSettingsPath,
+ );
});
});
});
describe('fetchSettingsError', () => {
beforeEach(() => {
- mountComponent({ dispatchMock: 'mockRejectedValue' });
+ const requests = mountComponentWithApollo({
+ resolver: jest.fn().mockRejectedValue(new Error('GraphQL error')),
+ });
+ return Promise.all(requests);
});
it('the form is hidden', () => {
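The `mountComponentWithApollo` helper above returns its `jest.fn()` resolvers so each test can flush them before asserting. Here is a condensed, hedged sketch of that pattern, assuming `createMockApollo` behaves exactly as it is used in the spec (an array of `[document, resolver]` pairs):

```js
import VueApollo from 'vue-apollo';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import createMockApollo from 'jest/helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
import component from '~/registry/settings/components/registry_settings_app.vue';
import { expirationPolicyPayload } from '../mock_data';

const localVue = createLocalVue();
localVue.use(VueApollo);

it('resolves the mocked query before asserting on the rendered output', async () => {
  const resolver = jest.fn().mockResolvedValue(expirationPolicyPayload());
  const apolloProvider = createMockApollo([[expirationPolicyQuery, resolver]]);

  const wrapper = shallowMount(component, {
    localVue,
    apolloProvider,
    provide: {
      projectPath: 'path',
      isAdmin: false,
      adminSettingsPath: 'settingsPath',
      enableHistoricEntries: false,
    },
  });

  // Flushing pending promises plays the same role as `await Promise.all(requests)` above.
  await waitForPromises();

  expect(resolver).toHaveBeenCalled();
  wrapper.destroy();
});
```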
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
index 6f9518808db..77fd71a22fc 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -1,30 +1,37 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'jest/helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Tracking from '~/tracking';
import component from '~/registry/settings/components/settings_form.vue';
import expirationPolicyFields from '~/registry/shared/components/expiration_policy_fields.vue';
-import { createStore } from '~/registry/settings/store/';
+import updateContainerExpirationPolicyMutation from '~/registry/settings/graphql/mutations/update_container_expiration_policy.graphql';
+import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
import {
UPDATE_SETTINGS_ERROR_MESSAGE,
UPDATE_SETTINGS_SUCCESS_MESSAGE,
} from '~/registry/shared/constants';
-import { stringifiedFormOptions } from '../../shared/mock_data';
+import { GlCard, GlLoadingIcon } from '../../shared/stubs';
+import { expirationPolicyPayload, expirationPolicyMutationPayload } from '../mock_data';
+
+const localVue = createLocalVue();
describe('Settings Form', () => {
let wrapper;
- let store;
- let dispatchSpy;
-
- const GlLoadingIcon = { name: 'gl-loading-icon-stub', template: '<svg></svg>' };
- const GlCard = {
- name: 'gl-card-stub',
- template: `
- <div>
- <slot name="header"></slot>
- <slot></slot>
- <slot name="footer"></slot>
- </div>
- `,
+ let fakeApollo;
+
+ const defaultProvidedValues = {
+ projectPath: 'path',
+ };
+
+ const {
+ data: {
+ project: { containerExpirationPolicy },
+ },
+ } = expirationPolicyPayload();
+
+ const defaultProps = {
+ value: { ...containerExpirationPolicy },
};
const trackingPayload = {
@@ -35,14 +42,21 @@ describe('Settings Form', () => {
const findFields = () => wrapper.find(expirationPolicyFields);
const findCancelButton = () => wrapper.find({ ref: 'cancel-button' });
const findSaveButton = () => wrapper.find({ ref: 'save-button' });
- const findLoadingIcon = (parent = wrapper) => parent.find(GlLoadingIcon);
- const mountComponent = (data = {}) => {
+ const mountComponent = ({
+ props = defaultProps,
+ data,
+ config,
+ provide = defaultProvidedValues,
+ mocks,
+ } = {}) => {
wrapper = shallowMount(component, {
stubs: {
GlCard,
GlLoadingIcon,
},
+ propsData: { ...props },
+ provide,
data() {
return {
...data,
@@ -52,15 +66,42 @@ describe('Settings Form', () => {
$toast: {
show: jest.fn(),
},
+ ...mocks,
},
- store,
+ ...config,
});
};
+ const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ localVue.use(VueApollo);
+
+ const requestHandlers = [
+ [updateContainerExpirationPolicyMutation, resolver],
+ [expirationPolicyQuery, jest.fn().mockResolvedValue(expirationPolicyPayload())],
+ ];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ fakeApollo.defaultClient.cache.writeQuery({
+ query: expirationPolicyQuery,
+ variables: {
+ projectPath: provide.projectPath,
+ },
+ ...expirationPolicyPayload(),
+ });
+
+ mountComponent({
+ provide,
+ config: {
+ localVue,
+ apolloProvider: fakeApollo,
+ },
+ });
+
+ return requestHandlers.map(resolvers => resolvers[1]);
+ };
+
beforeEach(() => {
- store = createStore();
- store.dispatch('setInitialState', stringifiedFormOptions);
- dispatchSpy = jest.spyOn(store, 'dispatch');
jest.spyOn(Tracking, 'event');
});
@@ -72,12 +113,12 @@ describe('Settings Form', () => {
it('v-model change update the settings property', () => {
mountComponent();
findFields().vm.$emit('input', { newValue: 'foo' });
- expect(dispatchSpy).toHaveBeenCalledWith('updateSettings', { settings: 'foo' });
+ expect(wrapper.emitted('input')).toEqual([['foo']]);
});
it('v-model change update the api error property', () => {
const apiErrors = { baz: 'bar' };
- mountComponent({ apiErrors });
+ mountComponent({ data: { apiErrors } });
expect(findFields().props('apiErrors')).toEqual(apiErrors);
findFields().vm.$emit('input', { newValue: 'foo', modified: 'baz' });
expect(findFields().props('apiErrors')).toEqual({});
@@ -85,19 +126,14 @@ describe('Settings Form', () => {
});
describe('form', () => {
- let form;
- beforeEach(() => {
- mountComponent();
- form = findForm();
- dispatchSpy.mockReturnValue();
- });
-
describe('form reset event', () => {
beforeEach(() => {
- form.trigger('reset');
+ mountComponent();
+
+ findForm().trigger('reset');
});
it('calls the appropriate function', () => {
- expect(dispatchSpy).toHaveBeenCalledWith('resetSettings');
+ expect(wrapper.emitted('reset')).toEqual([[]]);
});
it('tracks the reset event', () => {
@@ -108,54 +144,96 @@ describe('Settings Form', () => {
describe('form submit event ', () => {
it('save has type submit', () => {
mountComponent();
+
expect(findSaveButton().attributes('type')).toBe('submit');
});
- it('dispatches the saveSettings action', () => {
- dispatchSpy.mockResolvedValue();
- form.trigger('submit');
- expect(dispatchSpy).toHaveBeenCalledWith('saveSettings');
+ it('dispatches the correct apollo mutation', async () => {
+ const [expirationPolicyMutationResolver] = mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+ await waitForPromises();
+ expect(expirationPolicyMutationResolver).toHaveBeenCalled();
});
it('tracks the submit event', () => {
- dispatchSpy.mockResolvedValue();
- form.trigger('submit');
+ mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+
expect(Tracking.event).toHaveBeenCalledWith(undefined, 'submit_form', trackingPayload);
});
it('show a success toast when submit succeed', async () => {
- dispatchSpy.mockResolvedValue();
- form.trigger('submit');
- await waitForPromises();
+ const handlers = mountComponentWithApollo({
+ resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+ await Promise.all(handlers);
+ await wrapper.vm.$nextTick();
+
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
type: 'success',
});
});
describe('when submit fails', () => {
- it('shows an error', async () => {
- dispatchSpy.mockRejectedValue({ response: {} });
- form.trigger('submit');
- await waitForPromises();
- expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
- type: 'error',
+ describe('user recoverable errors', () => {
+ it('shows the returned error message in a toast', async () => {
+ const handlers = mountComponentWithApollo({
+ resolver: jest
+ .fn()
+ .mockResolvedValue(expirationPolicyMutationPayload({ errors: ['foo'] })),
+ });
+
+ findForm().trigger('submit');
+ await Promise.all(handlers);
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('foo', {
+ type: 'error',
+ });
});
});
+ describe('global errors', () => {
+ it('shows an error', async () => {
+ const handlers = mountComponentWithApollo({
+ resolver: jest.fn().mockRejectedValue(expirationPolicyMutationPayload()),
+ });
+
+ findForm().trigger('submit');
+ await Promise.all(handlers);
+ await wrapper.vm.$nextTick();
+ await wrapper.vm.$nextTick();
- it('parses the error messages', async () => {
- dispatchSpy.mockRejectedValue({
- response: {
- data: {
- message: {
- foo: 'bar',
- 'container_expiration_policy.name': ['baz'],
+ expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
+ type: 'error',
+ });
+ });
+
+ it('parses the error messages', async () => {
+ const mutate = jest.fn().mockRejectedValue({
+ graphQLErrors: [
+ {
+ extensions: {
+ problems: [{ path: ['name'], message: 'baz' }],
+ },
},
- },
- },
+ ],
+ });
+ mountComponent({ mocks: { $apollo: { mutate } } });
+
+ findForm().trigger('submit');
+ await waitForPromises();
+ await wrapper.vm.$nextTick();
+
+ expect(findFields().props('apiErrors')).toEqual({ name: 'baz' });
});
- form.trigger('submit');
- await waitForPromises();
- expect(findFields().props('apiErrors')).toEqual({ name: 'baz' });
});
});
});
@@ -163,51 +241,78 @@ describe('Settings Form', () => {
describe('form actions', () => {
describe('cancel button', () => {
- beforeEach(() => {
- store.commit('SET_SETTINGS', { foo: 'bar' });
+ it('has type reset', () => {
mountComponent();
- });
- it('has type reset', () => {
expect(findCancelButton().attributes('type')).toBe('reset');
});
- it('is disabled when isEdited is false', () =>
- wrapper.vm.$nextTick().then(() => {
- expect(findCancelButton().attributes('disabled')).toBe('true');
- }));
-
- it('is disabled isLoading is true', () => {
- store.commit('TOGGLE_LOADING');
- store.commit('UPDATE_SETTINGS', { settings: { foo: 'baz' } });
- return wrapper.vm.$nextTick().then(() => {
- expect(findCancelButton().attributes('disabled')).toBe('true');
- store.commit('TOGGLE_LOADING');
- });
- });
+ it.each`
+ isLoading | isEdited | mutationLoading | isDisabled
+ ${true} | ${true} | ${true} | ${true}
+ ${false} | ${true} | ${true} | ${true}
+ ${false} | ${false} | ${true} | ${true}
+ ${true} | ${false} | ${false} | ${true}
+ ${false} | ${false} | ${false} | ${true}
+ ${false} | ${true} | ${false} | ${false}
+ `(
+ 'when isLoading is $isLoading, isEdited is $isEdited, and mutationLoading is $mutationLoading, then disabled is $isDisabled',
+ ({ isEdited, isLoading, mutationLoading, isDisabled }) => {
+ mountComponent({
+ props: { ...defaultProps, isEdited, isLoading },
+ data: { mutationLoading },
+ });
- it('is enabled when isLoading is false and isEdited is true', () => {
- store.commit('UPDATE_SETTINGS', { settings: { foo: 'baz' } });
- return wrapper.vm.$nextTick().then(() => {
- expect(findCancelButton().attributes('disabled')).toBe(undefined);
- });
- });
+ const expectation = isDisabled ? 'true' : undefined;
+ expect(findCancelButton().attributes('disabled')).toBe(expectation);
+ },
+ );
});
- describe('when isLoading is true', () => {
- beforeEach(() => {
- store.commit('TOGGLE_LOADING');
+ describe('submit button', () => {
+ it('has type submit', () => {
mountComponent();
- });
- afterEach(() => {
- store.commit('TOGGLE_LOADING');
- });
- it('submit button is disabled and shows a spinner', () => {
- const button = findSaveButton();
- expect(button.attributes('disabled')).toBeTruthy();
- expect(findLoadingIcon(button).exists()).toBe(true);
+ expect(findSaveButton().attributes('type')).toBe('submit');
});
+ it.each`
+ isLoading | fieldsAreValid | mutationLoading | isDisabled
+ ${true} | ${true} | ${true} | ${true}
+ ${false} | ${true} | ${true} | ${true}
+ ${false} | ${false} | ${true} | ${true}
+ ${true} | ${false} | ${false} | ${true}
+ ${false} | ${false} | ${false} | ${true}
+ ${false} | ${true} | ${false} | ${false}
+ `(
+ 'when isLoading is $isLoading, fieldsAreValid is $fieldsAreValid, and mutationLoading is $mutationLoading, then disabled is $isDisabled',
+ ({ fieldsAreValid, isLoading, mutationLoading, isDisabled }) => {
+ mountComponent({
+ props: { ...defaultProps, isLoading },
+ data: { mutationLoading, fieldsAreValid },
+ });
+
+ const expectation = isDisabled ? 'true' : undefined;
+ expect(findSaveButton().attributes('disabled')).toBe(expectation);
+ },
+ );
+
+ it.each`
+ isLoading | mutationLoading | showLoading
+ ${true} | ${true} | ${true}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${false} | ${false} | ${false}
+ `(
+ 'when isLoading is $isLoading and mutationLoading is $mutationLoading, then the loading state is $showLoading',
+ ({ isLoading, mutationLoading, showLoading }) => {
+ mountComponent({
+ props: { ...defaultProps, isLoading },
+ data: { mutationLoading },
+ });
+
+ expect(findSaveButton().props('loading')).toBe(showLoading);
+ },
+ );
});
});
});
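The last test above mocks `$apollo.mutate` to reject with `graphQLErrors` whose `extensions.problems` carry `path`/`message` pairs, and expects them flattened into `apiErrors`. A hedged sketch of the mapping that expectation implies (an illustration only, not the component's actual implementation):

```js
const parseApiErrorsSketch = error =>
  (error.graphQLErrors || [])
    .flatMap(({ extensions }) => (extensions && extensions.problems) || [])
    .reduce((acc, { path, message }) => ({ ...acc, [path[path.length - 1]]: message }), {});

parseApiErrorsSketch({
  graphQLErrors: [{ extensions: { problems: [{ path: ['name'], message: 'baz' }] } }],
});
// => { name: 'baz' }, which is what findFields().props('apiErrors') is expected to equal.
```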
diff --git a/spec/frontend/registry/settings/graphql/cache_updated_spec.js b/spec/frontend/registry/settings/graphql/cache_updated_spec.js
new file mode 100644
index 00000000000..e5f69a08285
--- /dev/null
+++ b/spec/frontend/registry/settings/graphql/cache_updated_spec.js
@@ -0,0 +1,56 @@
+import { updateContainerExpirationPolicy } from '~/registry/settings/graphql/utils/cache_update';
+import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';
+
+describe('Registry settings cache update', () => {
+ let client;
+
+ const payload = {
+ data: {
+ updateContainerExpirationPolicy: {
+ containerExpirationPolicy: {
+ enabled: true,
+ },
+ },
+ },
+ };
+
+ const cacheMock = {
+ project: {
+ containerExpirationPolicy: {
+ enabled: false,
+ },
+ },
+ };
+
+ const queryAndVariables = {
+ query: expirationPolicyQuery,
+ variables: { projectPath: 'foo' },
+ };
+
+ beforeEach(() => {
+ client = {
+ readQuery: jest.fn().mockReturnValue(cacheMock),
+ writeQuery: jest.fn(),
+ };
+ });
+ describe('updateContainerExpirationPolicy', () => {
+ it('calls readQuery', () => {
+ updateContainerExpirationPolicy('foo')(client, payload);
+ expect(client.readQuery).toHaveBeenCalledWith(queryAndVariables);
+ });
+
+ it('writes the correct result in the cache', () => {
+ updateContainerExpirationPolicy('foo')(client, payload);
+ expect(client.writeQuery).toHaveBeenCalledWith({
+ ...queryAndVariables,
+ data: {
+ project: {
+ containerExpirationPolicy: {
+ enabled: true,
+ },
+ },
+ },
+ });
+ });
+ });
+});
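The two tests in this new spec pin down the shape of `updateContainerExpirationPolicy`: it is curried on the project path, reads the cached query, merges in the mutation result's `containerExpirationPolicy`, and writes it back. A sketch under those assumptions (the real utility may clone the cached data differently):

```js
import expirationPolicyQuery from '~/registry/settings/graphql/queries/get_expiration_policy.graphql';

const updateContainerExpirationPolicySketch = projectPath => (client, { data }) => {
  const queryAndVariables = { query: expirationPolicyQuery, variables: { projectPath } };
  const cached = client.readQuery(queryAndVariables);

  client.writeQuery({
    ...queryAndVariables,
    data: {
      ...cached,
      project: {
        ...cached.project,
        containerExpirationPolicy: {
          ...cached.project.containerExpirationPolicy,
          ...data.updateContainerExpirationPolicy.containerExpirationPolicy,
        },
      },
    },
  });
};
```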
diff --git a/spec/frontend/registry/settings/mock_data.js b/spec/frontend/registry/settings/mock_data.js
new file mode 100644
index 00000000000..6a936785b7c
--- /dev/null
+++ b/spec/frontend/registry/settings/mock_data.js
@@ -0,0 +1,32 @@
+export const expirationPolicyPayload = override => ({
+ data: {
+ project: {
+ containerExpirationPolicy: {
+ cadence: 'EVERY_DAY',
+ enabled: true,
+ keepN: 'TEN_TAGS',
+ nameRegex: 'asdasdssssdfdf',
+ nameRegexKeep: 'sss',
+ olderThan: 'FOURTEEN_DAYS',
+ ...override,
+ },
+ },
+ },
+});
+
+export const expirationPolicyMutationPayload = ({ override, errors = [] } = {}) => ({
+ data: {
+ updateContainerExpirationPolicy: {
+ containerExpirationPolicy: {
+ cadence: 'EVERY_DAY',
+ enabled: true,
+ keepN: 'TEN_TAGS',
+ nameRegex: 'asdasdssssdfdf',
+ nameRegexKeep: 'sss',
+ olderThan: 'FOURTEEN_DAYS',
+ ...override,
+ },
+ errors,
+ },
+ },
+});
diff --git a/spec/frontend/registry/settings/store/actions_spec.js b/spec/frontend/registry/settings/store/actions_spec.js
deleted file mode 100644
index 51b89f96ef2..00000000000
--- a/spec/frontend/registry/settings/store/actions_spec.js
+++ /dev/null
@@ -1,90 +0,0 @@
-import testAction from 'helpers/vuex_action_helper';
-import Api from '~/api';
-import * as actions from '~/registry/settings/store/actions';
-import * as types from '~/registry/settings/store/mutation_types';
-
-describe('Actions Registry Store', () => {
- describe.each`
- actionName | mutationName | payload
- ${'setInitialState'} | ${types.SET_INITIAL_STATE} | ${'foo'}
- ${'updateSettings'} | ${types.UPDATE_SETTINGS} | ${'foo'}
- ${'toggleLoading'} | ${types.TOGGLE_LOADING} | ${undefined}
- ${'resetSettings'} | ${types.RESET_SETTINGS} | ${undefined}
- `(
- '$actionName invokes $mutationName with payload $payload',
- ({ actionName, mutationName, payload }) => {
- it('should set state', done => {
- testAction(actions[actionName], payload, {}, [{ type: mutationName, payload }], [], done);
- });
- },
- );
-
- describe('receiveSettingsSuccess', () => {
- it('calls SET_SETTINGS', () => {
- testAction(
- actions.receiveSettingsSuccess,
- 'foo',
- {},
- [{ type: types.SET_SETTINGS, payload: 'foo' }],
- [],
- );
- });
- });
-
- describe('fetchSettings', () => {
- const state = {
- projectId: 'bar',
- };
-
- const payload = {
- data: {
- container_expiration_policy: 'foo',
- },
- };
-
- it('should fetch the data from the API', done => {
- Api.project = jest.fn().mockResolvedValue(payload);
- testAction(
- actions.fetchSettings,
- null,
- state,
- [],
- [
- { type: 'toggleLoading' },
- { type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
- { type: 'toggleLoading' },
- ],
- done,
- );
- });
- });
-
- describe('saveSettings', () => {
- const state = {
- projectId: 'bar',
- settings: 'baz',
- };
-
- const payload = {
- data: {
- tag_expiration_policies: 'foo',
- },
- };
-
- it('should fetch the data from the API', done => {
- Api.updateProject = jest.fn().mockResolvedValue(payload);
- testAction(
- actions.saveSettings,
- null,
- state,
- [],
- [
- { type: 'toggleLoading' },
- { type: 'receiveSettingsSuccess', payload: payload.data.container_expiration_policy },
- { type: 'toggleLoading' },
- ],
- done,
- );
- });
- });
-});
diff --git a/spec/frontend/registry/settings/store/getters_spec.js b/spec/frontend/registry/settings/store/getters_spec.js
deleted file mode 100644
index b781d09466c..00000000000
--- a/spec/frontend/registry/settings/store/getters_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import * as getters from '~/registry/settings/store/getters';
-import * as utils from '~/registry/shared/utils';
-import { formOptions } from '../../shared/mock_data';
-
-describe('Getters registry settings store', () => {
- const settings = {
- enabled: true,
- cadence: 'foo',
- keep_n: 'bar',
- older_than: 'baz',
- name_regex: 'name-foo',
- name_regex_keep: 'name-keep-bar',
- };
-
- describe.each`
- getter | variable | formOption
- ${'getCadence'} | ${'cadence'} | ${'cadence'}
- ${'getKeepN'} | ${'keep_n'} | ${'keepN'}
- ${'getOlderThan'} | ${'older_than'} | ${'olderThan'}
- `('Options getter', ({ getter, variable, formOption }) => {
- beforeEach(() => {
- utils.findDefaultOption = jest.fn();
- });
-
- it(`${getter} returns ${variable} when ${variable} exists in settings`, () => {
- expect(getters[getter]({ settings })).toBe(settings[variable]);
- });
-
- it(`${getter} calls findDefaultOption when ${variable} does not exists in settings`, () => {
- getters[getter]({ settings: {}, formOptions });
- expect(utils.findDefaultOption).toHaveBeenCalledWith(formOptions[formOption]);
- });
- });
-
- describe('getSettings', () => {
- it('returns the content of settings', () => {
- const computedGetters = {
- getCadence: settings.cadence,
- getOlderThan: settings.older_than,
- getKeepN: settings.keep_n,
- };
- expect(getters.getSettings({ settings }, computedGetters)).toEqual(settings);
- });
- });
-
- describe('getIsEdited', () => {
- it('returns false when original is equal to settings', () => {
- const same = { foo: 'bar' };
- expect(getters.getIsEdited({ original: same, settings: same })).toBe(false);
- });
-
- it('returns true when original is different from settings', () => {
- expect(getters.getIsEdited({ original: { foo: 'bar' }, settings: { foo: 'baz' } })).toBe(
- true,
- );
- });
- });
-
- describe('getIsDisabled', () => {
- it.each`
- original | enableHistoricEntries | result
- ${undefined} | ${false} | ${true}
- ${{ foo: 'bar' }} | ${undefined} | ${false}
- ${{}} | ${false} | ${false}
- `(
- 'returns $result when original is $original and enableHistoricEntries is $enableHistoricEntries',
- ({ original, enableHistoricEntries, result }) => {
- expect(getters.getIsDisabled({ original, enableHistoricEntries })).toBe(result);
- },
- );
- });
-});
diff --git a/spec/frontend/registry/settings/store/mutations_spec.js b/spec/frontend/registry/settings/store/mutations_spec.js
deleted file mode 100644
index 1d85e38eb36..00000000000
--- a/spec/frontend/registry/settings/store/mutations_spec.js
+++ /dev/null
@@ -1,80 +0,0 @@
-import mutations from '~/registry/settings/store/mutations';
-import * as types from '~/registry/settings/store/mutation_types';
-import createState from '~/registry/settings/store/state';
-import { formOptions, stringifiedFormOptions } from '../../shared/mock_data';
-
-describe('Mutations Registry Store', () => {
- let mockState;
-
- beforeEach(() => {
- mockState = createState();
- });
-
- describe('SET_INITIAL_STATE', () => {
- it('should set the initial state', () => {
- const payload = {
- projectId: 'foo',
- enableHistoricEntries: false,
- adminSettingsPath: 'foo',
- isAdmin: true,
- };
- const expectedState = { ...mockState, ...payload, formOptions };
- mutations[types.SET_INITIAL_STATE](mockState, {
- ...payload,
- ...stringifiedFormOptions,
- });
-
- expect(mockState).toEqual(expectedState);
- });
- });
-
- describe('UPDATE_SETTINGS', () => {
- it('should update the settings', () => {
- mockState.settings = { foo: 'bar' };
- const payload = { foo: 'baz' };
- const expectedState = { ...mockState, settings: payload };
- mutations[types.UPDATE_SETTINGS](mockState, { settings: payload });
- expect(mockState.settings).toEqual(expectedState.settings);
- });
- });
-
- describe('SET_SETTINGS', () => {
- it('should set the settings and original', () => {
- const payload = { foo: 'baz' };
- const expectedState = { ...mockState, settings: payload };
- mutations[types.SET_SETTINGS](mockState, payload);
- expect(mockState.settings).toEqual(expectedState.settings);
- expect(mockState.original).toEqual(expectedState.settings);
- });
-
- it('should keep the default state when settings is not present', () => {
- const originalSettings = { ...mockState.settings };
- mutations[types.SET_SETTINGS](mockState);
- expect(mockState.settings).toEqual(originalSettings);
- expect(mockState.original).toEqual(undefined);
- });
- });
-
- describe('RESET_SETTINGS', () => {
- it('should copy original over settings', () => {
- mockState.settings = { foo: 'bar' };
- mockState.original = { foo: 'baz' };
- mutations[types.RESET_SETTINGS](mockState);
- expect(mockState.settings).toEqual(mockState.original);
- });
-
- it('if original is undefined it should initialize to empty object', () => {
- mockState.settings = { foo: 'bar' };
- mockState.original = undefined;
- mutations[types.RESET_SETTINGS](mockState);
- expect(mockState.settings).toEqual({});
- });
- });
-
- describe('TOGGLE_LOADING', () => {
- it('should toggle the loading', () => {
- mutations[types.TOGGLE_LOADING](mockState);
- expect(mockState.isLoading).toEqual(true);
- });
- });
-});
diff --git a/spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap b/spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap
new file mode 100644
index 00000000000..032007bba51
--- /dev/null
+++ b/spec/frontend/registry/shared/__snapshots__/utils_spec.js.snap
@@ -0,0 +1,101 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Utils formOptionsGenerator returns an object containing cadence 1`] = `
+Array [
+ Object {
+ "default": true,
+ "key": "EVERY_DAY",
+ "label": "Every day",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_WEEK",
+ "label": "Every week",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_TWO_WEEKS",
+ "label": "Every two weeks",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_MONTH",
+ "label": "Every month",
+ },
+ Object {
+ "default": false,
+ "key": "EVERY_THREE_MONTHS",
+ "label": "Every three months",
+ },
+]
+`;
+
+exports[`Utils formOptionsGenerator returns an object containing keepN 1`] = `
+Array [
+ Object {
+ "default": false,
+ "key": "ONE_TAG",
+ "label": "1 tag per image name",
+ "variable": 1,
+ },
+ Object {
+ "default": false,
+ "key": "FIVE_TAGS",
+ "label": "5 tags per image name",
+ "variable": 5,
+ },
+ Object {
+ "default": true,
+ "key": "TEN_TAGS",
+ "label": "10 tags per image name",
+ "variable": 10,
+ },
+ Object {
+ "default": false,
+ "key": "TWENTY_FIVE_TAGS",
+ "label": "25 tags per image name",
+ "variable": 25,
+ },
+ Object {
+ "default": false,
+ "key": "FIFTY_TAGS",
+ "label": "50 tags per image name",
+ "variable": 50,
+ },
+ Object {
+ "default": false,
+ "key": "ONE_HUNDRED_TAGS",
+ "label": "100 tags per image name",
+ "variable": 100,
+ },
+]
+`;
+
+exports[`Utils formOptionsGenerator returns an object containing olderThan 1`] = `
+Array [
+ Object {
+ "default": false,
+ "key": "SEVEN_DAYS",
+ "label": "7 days until tags are automatically removed",
+ "variable": 7,
+ },
+ Object {
+ "default": false,
+ "key": "FOURTEEN_DAYS",
+ "label": "14 days until tags are automatically removed",
+ "variable": 14,
+ },
+ Object {
+ "default": false,
+ "key": "THIRTY_DAYS",
+ "label": "30 days until tags are automatically removed",
+ "variable": 30,
+ },
+ Object {
+ "default": true,
+ "key": "NINETY_DAYS",
+ "label": "90 days until tags are automatically removed",
+ "variable": 90,
+ },
+]
+`;
diff --git a/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js b/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
index ee765ffd1c0..bee9bca5369 100644
--- a/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
+++ b/spec/frontend/registry/shared/components/expiration_policy_fields_spec.js
@@ -40,13 +40,13 @@ describe('Expiration Policy Form', () => {
});
describe.each`
- elementName | modelName | value | disabledByToggle
- ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
- ${'interval'} | ${'older_than'} | ${'foo'} | ${'disabled'}
- ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
- ${'latest'} | ${'keep_n'} | ${'foo'} | ${'disabled'}
- ${'name-matching'} | ${'name_regex'} | ${'foo'} | ${'disabled'}
- ${'keep-name'} | ${'name_regex_keep'} | ${'bar'} | ${'disabled'}
+ elementName | modelName | value | disabledByToggle
+ ${'toggle'} | ${'enabled'} | ${true} | ${'not disabled'}
+ ${'interval'} | ${'olderThan'} | ${'foo'} | ${'disabled'}
+ ${'schedule'} | ${'cadence'} | ${'foo'} | ${'disabled'}
+ ${'latest'} | ${'keepN'} | ${'foo'} | ${'disabled'}
+ ${'name-matching'} | ${'nameRegex'} | ${'foo'} | ${'disabled'}
+ ${'keep-name'} | ${'nameRegexKeep'} | ${'bar'} | ${'disabled'}
`(
`${FORM_ELEMENTS_ID_PREFIX}-$elementName form element`,
({ elementName, modelName, value, disabledByToggle }) => {
@@ -128,9 +128,9 @@ describe('Expiration Policy Form', () => {
});
describe.each`
- modelName | elementName
- ${'name_regex'} | ${'name-matching'}
- ${'name_regex_keep'} | ${'keep-name'}
+ modelName | elementName
+ ${'nameRegex'} | ${'name-matching'}
+ ${'nameRegexKeep'} | ${'keep-name'}
`('regex textarea validation', ({ modelName, elementName }) => {
const invalidString = new Array(NAME_REGEX_LENGTH + 2).join(',');
diff --git a/spec/frontend/registry/shared/stubs.js b/spec/frontend/registry/shared/stubs.js
new file mode 100644
index 00000000000..f6b88d70e49
--- /dev/null
+++ b/spec/frontend/registry/shared/stubs.js
@@ -0,0 +1,11 @@
+export const GlLoadingIcon = { name: 'gl-loading-icon-stub', template: '<svg></svg>' };
+export const GlCard = {
+ name: 'gl-card-stub',
+ template: `
+<div>
+ <slot name="header"></slot>
+ <slot></slot>
+ <slot name="footer"></slot>
+</div>
+`,
+};
diff --git a/spec/frontend/registry/shared/utils_spec.js b/spec/frontend/registry/shared/utils_spec.js
new file mode 100644
index 00000000000..edb0c3261be
--- /dev/null
+++ b/spec/frontend/registry/shared/utils_spec.js
@@ -0,0 +1,37 @@
+import {
+ formOptionsGenerator,
+ optionLabelGenerator,
+ olderThanTranslationGenerator,
+} from '~/registry/shared/utils';
+
+describe('Utils', () => {
+ describe('optionLabelGenerator', () => {
+ it('returns an array with a set label', () => {
+ const result = optionLabelGenerator(
+ [{ variable: 1 }, { variable: 2 }],
+ olderThanTranslationGenerator,
+ );
+ expect(result).toEqual([
+ { variable: 1, label: '1 day until tags are automatically removed' },
+ { variable: 2, label: '2 days until tags are automatically removed' },
+ ]);
+ });
+ });
+
+ describe('formOptionsGenerator', () => {
+ it('returns an object containing olderThan', () => {
+ expect(formOptionsGenerator().olderThan).toBeDefined();
+ expect(formOptionsGenerator().olderThan).toMatchSnapshot();
+ });
+
+ it('returns an object containing cadence', () => {
+ expect(formOptionsGenerator().cadence).toBeDefined();
+ expect(formOptionsGenerator().cadence).toMatchSnapshot();
+ });
+
+ it('returns an object containing keepN', () => {
+ expect(formOptionsGenerator().keepN).toBeDefined();
+ expect(formOptionsGenerator().keepN).toMatchSnapshot();
+ });
+ });
+});
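From the assertions in this new spec one can infer roughly what `optionLabelGenerator` does: copy each option and attach a label produced by the supplied translation generator. The sketch below is derived from the test expectations only; the real utility may differ in detail:

```js
const optionLabelGeneratorSketch = (options, translationGenerator) =>
  options.map(option => ({
    ...option,
    label: translationGenerator(option.variable),
  }));

// optionLabelGeneratorSketch([{ variable: 1 }], olderThanTranslationGenerator)
// => [{ variable: 1, label: '1 day until tags are automatically removed' }]
```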
diff --git a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
index 1b938c93df8..db33a9cdce1 100644
--- a/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/frontend/related_merge_requests/components/related_merge_requests_spec.js
@@ -19,9 +19,8 @@ describe('RelatedMergeRequests', () => {
mockData = getJSONFixture(FIXTURE_PATH);
// put the fixture in DOM as the component expects
- document.body.innerHTML = `<div id="js-issuable-app-initial-data">${JSON.stringify(
- mockData,
- )}</div>`;
+ document.body.innerHTML = `<div id="js-issuable-app"></div>`;
+ document.getElementById('js-issuable-app').dataset.initial = JSON.stringify(mockData);
mock = new MockAdapter(axios);
mock.onGet(`${API_ENDPOINT}?per_page=100`).reply(200, mockData, { 'x-total': 2 });
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index f56e296d106..84247e2a5a0 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -5,109 +5,123 @@ Object {
"data": Array [
Object {
"_links": Object {
- "editUrl": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10/edit",
- "issuesUrl": null,
- "mergeRequestsUrl": null,
- "self": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10",
- "selfUrl": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10",
+ "editUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/edit",
+ "issuesUrl": "http://localhost/releases-namespace/releases-project/-/issues?release_tag=v1.1&scope=all&state=opened",
+ "mergeRequestsUrl": "http://localhost/releases-namespace/releases-project/-/merge_requests?release_tag=v1.1&scope=all&state=opened",
+ "self": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
+ "selfUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
},
"assets": Object {
- "count": 7,
+ "count": 8,
"links": Array [
Object {
- "directAssetUrl": "http://0.0.0.0:3000/root/release-test/-/releases/v5.32/permanent/path/to/runbook",
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-3",
"external": true,
- "id": "gid://gitlab/Releases::Link/69",
- "linkType": "other",
- "name": "An example link",
- "url": "https://example.com/link",
+ "id": "gid://gitlab/Releases::Link/13",
+ "linkType": "image",
+ "name": "Image",
+ "url": "https://example.com/image",
},
Object {
- "directAssetUrl": "https://example.com/package",
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-2",
"external": true,
- "id": "gid://gitlab/Releases::Link/68",
+ "id": "gid://gitlab/Releases::Link/12",
"linkType": "package",
- "name": "An example package link",
+ "name": "Package",
"url": "https://example.com/package",
},
Object {
- "directAssetUrl": "https://example.com/image",
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/awesome-app-1",
+ "external": false,
+ "id": "gid://gitlab/Releases::Link/11",
+ "linkType": "runbook",
+ "name": "Runbook",
+ "url": "http://localhost/releases-namespace/releases-project/runbook",
+ },
+ Object {
+ "directAssetUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/binaries/linux-amd64",
"external": true,
- "id": "gid://gitlab/Releases::Link/67",
- "linkType": "image",
- "name": "An example image",
- "url": "https://example.com/image",
+ "id": "gid://gitlab/Releases::Link/10",
+ "linkType": "other",
+ "name": "linux-amd64 binaries",
+ "url": "https://downloads.example.com/bin/gitlab-linux-amd64",
},
],
"sources": Array [
Object {
"format": "zip",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.zip",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.zip",
},
Object {
"format": "tar.gz",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.gz",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.gz",
},
Object {
"format": "tar.bz2",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.bz2",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar.bz2",
},
Object {
"format": "tar",
- "url": "http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar",
+ "url": "http://localhost/releases-namespace/releases-project/-/archive/v1.1/releases-project-v1.1.tar",
},
],
},
"author": Object {
- "avatarUrl": "/uploads/-/system/user/avatar/1/avatar.png",
- "username": "root",
- "webUrl": "http://0.0.0.0:3000/root",
+ "avatarUrl": "https://www.gravatar.com/avatar/16f8e2050ce10180ca571c2eb19cfce2?s=80&d=identicon",
+ "username": "administrator",
+ "webUrl": "http://localhost/administrator",
},
"commit": Object {
- "shortId": "92e7ea2e",
- "title": "Testing a change.",
+ "shortId": "b83d6e39",
+ "title": "Merge branch 'branch-merged' into 'master'",
},
- "commitPath": "http://0.0.0.0:3000/root/release-test/-/commit/92e7ea2ee4496fe0d00ff69830ba0564d3d1e5a7",
- "descriptionHtml": "<p data-sourcepos=\\"1:1-1:24\\" dir=\\"auto\\">This is version <strong>1.0</strong>!</p>",
+ "commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
+ "descriptionHtml": "<p data-sourcepos=\\"1:1-1:33\\" dir=\\"auto\\">Best. Release. <strong>Ever.</strong> <gl-emoji title=\\"rocket\\" data-name=\\"rocket\\" data-unicode-version=\\"6.0\\">🚀</gl-emoji></p>",
"evidences": Array [
Object {
- "collectedAt": "2020-08-21T20:15:19Z",
- "filepath": "http://0.0.0.0:3000/root/release-test/-/releases/v5.10/evidences/34.json",
- "sha": "22bde8e8b93d870a29ddc339287a1fbb598f45d1396d",
+ "collectedAt": "2018-12-03T00:00:00Z",
+ "filepath": "http://localhost/releases-namespace/releases-project/-/releases/v1.1/evidences/1.json",
+ "sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
},
],
"milestones": Array [
Object {
- "description": "",
- "id": "gid://gitlab/Milestone/60",
+ "description": "The 12.4 milestone",
+ "id": "gid://gitlab/Milestone/124",
"issueStats": Object {
- "closed": 0,
- "total": 0,
+ "closed": 1,
+ "total": 4,
},
"stats": undefined,
"title": "12.4",
"webPath": undefined,
- "webUrl": "/root/release-test/-/milestones/2",
+ "webUrl": "/releases-namespace/releases-project/-/milestones/2",
},
Object {
- "description": "Milestone 12.3",
- "id": "gid://gitlab/Milestone/59",
+ "description": "The 12.3 milestone",
+ "id": "gid://gitlab/Milestone/123",
"issueStats": Object {
- "closed": 1,
- "total": 2,
+ "closed": 3,
+ "total": 5,
},
"stats": undefined,
"title": "12.3",
"webPath": undefined,
- "webUrl": "/root/release-test/-/milestones/1",
+ "webUrl": "/releases-namespace/releases-project/-/milestones/1",
},
],
- "name": "Release 1.0",
- "releasedAt": "2020-08-21T20:15:18Z",
- "tagName": "v5.10",
- "tagPath": "/root/release-test/-/tags/v5.10",
- "upcomingRelease": false,
+ "name": "The first release",
+ "releasedAt": "2018-12-10T00:00:00Z",
+ "tagName": "v1.1",
+ "tagPath": "/releases-namespace/releases-project/-/tags/v1.1",
+ "upcomingRelease": true,
},
],
+ "paginationInfo": Object {
+ "endCursor": "eyJpZCI6IjEiLCJyZWxlYXNlZF9hdCI6IjIwMTgtMTItMTAgMDA6MDA6MDAuMDAwMDAwMDAwIFVUQyJ9",
+ "hasNextPage": false,
+ "hasPreviousPage": false,
+ "startCursor": "eyJpZCI6IjEiLCJyZWxlYXNlZF9hdCI6IjIwMTgtMTItMTAgMDA6MDA6MDAuMDAwMDAwMDAwIFVUQyJ9",
+ },
}
`;
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index e9727801c1a..3367ca8ba3a 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -3,12 +3,15 @@ import { mount } from '@vue/test-utils';
import { merge } from 'lodash';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseEditNewApp from '~/releases/components/app_edit_new.vue';
-import { release as originalRelease, milestones as originalMilestones } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { BACK_URL_PARAM } from '~/releases/constants';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
+const originalRelease = getJSONFixture('api/releases/release.json');
+const originalMilestones = originalRelease.milestones;
+
describe('Release edit/new component', () => {
let wrapper;
let release;
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index bcb87509cc3..9f1577c2f1e 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -2,27 +2,33 @@ import { range as rge } from 'lodash';
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleasesApp from '~/releases/components/app_index.vue';
import createStore from '~/releases/stores';
import createListModule from '~/releases/stores/modules/list';
import api from '~/api';
-import {
- pageInfoHeadersWithoutPagination,
- pageInfoHeadersWithPagination,
- release2 as release,
- releases,
-} from '../mock_data';
+import { pageInfoHeadersWithoutPagination, pageInfoHeadersWithPagination } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
+import ReleasesPagination from '~/releases/components/releases_pagination.vue';
+
+jest.mock('~/lib/utils/common_utils', () => ({
+ ...jest.requireActual('~/lib/utils/common_utils'),
+ getParameterByName: jest.fn().mockImplementation(paramName => {
+ return `${paramName}_param_value`;
+ }),
+}));
const localVue = createLocalVue();
localVue.use(Vuex);
+const release = getJSONFixture('api/releases/release.json');
+const releases = [release];
+
describe('Releases App ', () => {
let wrapper;
let fetchReleaseSpy;
- const releasesPagination = rge(21).map(index => ({
+ const paginatedReleases = rge(21).map(index => ({
...convertObjectPropsToCamelCase(release, { deep: true }),
tagName: `${index}.00`,
}));
@@ -70,9 +76,13 @@ describe('Releases App ', () => {
createComponent();
});
- it('calls fetchRelease with the page parameter', () => {
+ it('calls fetchRelease with the page, before, and after parameters', () => {
expect(fetchReleaseSpy).toHaveBeenCalledTimes(1);
- expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), { page: null });
+ expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), {
+ page: 'page_param_value',
+ before: 'before_param_value',
+ after: 'after_param_value',
+ });
});
});
@@ -91,7 +101,7 @@ describe('Releases App ', () => {
expect(wrapper.contains('.js-loading')).toBe(true);
expect(wrapper.contains('.js-empty-state')).toBe(false);
expect(wrapper.contains('.js-success-state')).toBe(false);
- expect(wrapper.contains(TablePagination)).toBe(false);
+ expect(wrapper.contains(ReleasesPagination)).toBe(false);
});
});
@@ -108,7 +118,7 @@ describe('Releases App ', () => {
expect(wrapper.contains('.js-loading')).toBe(false);
expect(wrapper.contains('.js-empty-state')).toBe(false);
expect(wrapper.contains('.js-success-state')).toBe(true);
- expect(wrapper.contains(TablePagination)).toBe(true);
+ expect(wrapper.contains(ReleasesPagination)).toBe(true);
});
});
@@ -116,7 +126,7 @@ describe('Releases App ', () => {
beforeEach(() => {
jest
.spyOn(api, 'releases')
- .mockResolvedValue({ data: releasesPagination, headers: pageInfoHeadersWithPagination });
+ .mockResolvedValue({ data: paginatedReleases, headers: pageInfoHeadersWithPagination });
createComponent();
});
@@ -125,7 +135,7 @@ describe('Releases App ', () => {
expect(wrapper.contains('.js-loading')).toBe(false);
expect(wrapper.contains('.js-empty-state')).toBe(false);
expect(wrapper.contains('.js-success-state')).toBe(true);
- expect(wrapper.contains(TablePagination)).toBe(true);
+ expect(wrapper.contains(ReleasesPagination)).toBe(true);
});
});
@@ -154,7 +164,7 @@ describe('Releases App ', () => {
const newReleasePath = 'path/to/new/release';
beforeEach(() => {
- createComponent({ ...defaultInitialState, newReleasePath });
+ createComponent({ newReleasePath });
});
it('renders the "New release" button', () => {
@@ -174,4 +184,27 @@ describe('Releases App ', () => {
});
});
});
+
+ describe('when the back button is pressed', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(api, 'releases')
+ .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
+
+ createComponent();
+
+ fetchReleaseSpy.mockClear();
+
+ window.dispatchEvent(new PopStateEvent('popstate'));
+ });
+
+ it('calls fetchRelease with the page parameter', () => {
+ expect(fetchReleaseSpy).toHaveBeenCalledTimes(1);
+ expect(fetchReleaseSpy).toHaveBeenCalledWith(expect.anything(), {
+ page: 'page_param_value',
+ before: 'before_param_value',
+ after: 'after_param_value',
+ });
+ });
+ });
});
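The `jest.mock` call added near the top of this spec is a partial module mock: it keeps every real export of `common_utils` via `jest.requireActual` and overrides only `getParameterByName`, so the page/before/after values the component reads from the URL become deterministic strings. The same pattern in isolation:

```js
jest.mock('~/lib/utils/common_utils', () => ({
  ...jest.requireActual('~/lib/utils/common_utils'),
  getParameterByName: jest.fn().mockImplementation(name => `${name}_param_value`),
}));

// Any code under test that calls getParameterByName('page') now receives
// 'page_param_value', which is exactly what the fetchRelease assertions check.
```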
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 502a1053663..181fa0150f1 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -1,11 +1,13 @@
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseShowApp from '~/releases/components/app_show.vue';
-import { release as originalRelease } from '../mock_data';
+import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release show component', () => {
let wrapper;
let release;
@@ -33,7 +35,7 @@ describe('Release show component', () => {
wrapper = shallowMount(ReleaseShowApp, { store });
};
- const findLoadingSkeleton = () => wrapper.find(GlSkeletonLoading);
+ const findLoadingSkeleton = () => wrapper.find(ReleaseSkeletonLoader);
const findReleaseBlock = () => wrapper.find(ReleaseBlock);
it('calls fetchRelease when the component is created', () => {
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index 582c0b32716..e5b8ed267a0 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -1,7 +1,7 @@
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
+import { getJSONFixture } from 'helpers/fixtures';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
-import { release as originalRelease } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { ENTER_KEY } from '~/lib/utils/keys';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
@@ -9,6 +9,8 @@ import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release edit component', () => {
let wrapper;
let release;
@@ -223,10 +225,18 @@ describe('Release edit component', () => {
});
});
- it('selects the default asset type if no type was provided by the backend', () => {
- const selected = wrapper.find({ ref: 'typeSelect' }).element.value;
+ describe('when no link type was provided by the backend', () => {
+ beforeEach(() => {
+ delete release.assets.links[0].linkType;
+
+ factory({ release });
+ });
+
+ it('selects the default asset type', () => {
+ const selected = wrapper.find({ ref: 'typeSelect' }).element.value;
- expect(selected).toBe(DEFAULT_ASSET_LINK_TYPE);
+ expect(selected).toBe(DEFAULT_ASSET_LINK_TYPE);
+ });
});
});
diff --git a/spec/frontend/releases/components/evidence_block_spec.js b/spec/frontend/releases/components/evidence_block_spec.js
index ba60a79e464..b8c78f90fc2 100644
--- a/spec/frontend/releases/components/evidence_block_spec.js
+++ b/spec/frontend/releases/components/evidence_block_spec.js
@@ -1,11 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlLink, GlIcon } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import { truncateSha } from '~/lib/utils/text_utility';
-import { release as originalRelease } from '../mock_data';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Evidence Block', () => {
let wrapper;
let release;
@@ -35,7 +37,7 @@ describe('Evidence Block', () => {
});
it('renders the title for the download link', () => {
- expect(wrapper.find(GlLink).text()).toBe('v1.1.2-evidences-1.json');
+ expect(wrapper.find(GlLink).text()).toBe(`v1.1-evidences-1.json`);
});
it('renders the correct hover text for the download', () => {
@@ -43,7 +45,7 @@ describe('Evidence Block', () => {
});
it('renders the correct file link for download', () => {
- expect(wrapper.find(GlLink).attributes().download).toBe('v1.1.2-evidences-1.json');
+ expect(wrapper.find(GlLink).attributes().download).toBe(`v1.1-evidences-1.json`);
});
describe('sha text', () => {
diff --git a/spec/frontend/releases/components/release_block_assets_spec.js b/spec/frontend/releases/components/release_block_assets_spec.js
index 3453ecbf8ab..adccd9d87ef 100644
--- a/spec/frontend/releases/components/release_block_assets_spec.js
+++ b/spec/frontend/releases/components/release_block_assets_spec.js
@@ -1,10 +1,12 @@
import { mount } from '@vue/test-utils';
import { GlCollapse } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
-import { cloneDeep } from 'lodash';
+import { getJSONFixture } from 'helpers/fixtures';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockAssets from '~/releases/components/release_block_assets.vue';
import { ASSET_LINK_TYPE } from '~/releases/constants';
-import { assets } from '../mock_data';
+
+const { assets } = getJSONFixture('api/releases/release.json');
describe('Release block assets', () => {
let wrapper;
@@ -31,7 +33,7 @@ describe('Release block assets', () => {
wrapper.findAll('h5').filter(h5 => h5.text() === sections[type]);
beforeEach(() => {
- defaultProps = { assets: cloneDeep(assets) };
+ defaultProps = { assets: convertObjectPropsToCamelCase(assets, { deep: true }) };
});
describe('with default props', () => {
@@ -43,7 +45,7 @@ describe('Release block assets', () => {
const accordionButton = findAccordionButton();
expect(accordionButton.exists()).toBe(true);
- expect(trimText(accordionButton.text())).toBe('Assets 5');
+ expect(trimText(accordionButton.text())).toBe('Assets 8');
});
it('renders the accordion as expanded by default', () => {
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index bde01cc0e00..f1c0c24f8ca 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -1,11 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlLink, GlIcon } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import { cloneDeep } from 'lodash';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
-import { release as originalRelease } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
const mockFutureDate = new Date(9999, 0, 0).toISOString();
let mockIsFutureRelease = false;
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
index 9c6cbc86d3c..f2159871395 100644
--- a/spec/frontend/releases/components/release_block_header_spec.js
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -1,11 +1,13 @@
import { shallowMount } from '@vue/test-utils';
import { merge } from 'lodash';
import { GlLink } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { release as originalRelease } from '../mock_data';
import { BACK_URL_PARAM } from '~/releases/constants';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release block header', () => {
let wrapper;
let release;
@@ -49,7 +51,7 @@ describe('Release block header', () => {
});
it('renders the title as text', () => {
- expect(findHeader().text()).toBe(release.name);
+ expect(findHeader().text()).toContain(release.name);
expect(findHeaderLink().exists()).toBe(false);
});
});
diff --git a/spec/frontend/releases/components/release_block_metadata_spec.js b/spec/frontend/releases/components/release_block_metadata_spec.js
index 6f184e45600..9038553fc8e 100644
--- a/spec/frontend/releases/components/release_block_metadata_spec.js
+++ b/spec/frontend/releases/components/release_block_metadata_spec.js
@@ -1,10 +1,12 @@
import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import { cloneDeep } from 'lodash';
import ReleaseBlockMetadata from '~/releases/components/release_block_metadata.vue';
-import { release as originalRelease } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
const mockFutureDate = new Date(9999, 0, 0).toISOString();
let mockIsFutureRelease = false;
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index 0e79c45b337..45f4eaa01a9 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -1,11 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlProgressBar, GlLink, GlBadge, GlButton } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import ReleaseBlockMilestoneInfo from '~/releases/components/release_block_milestone_info.vue';
-import { milestones as originalMilestones } from '../mock_data';
import { MAX_MILESTONES_TO_DISPLAY } from '~/releases/constants';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+const { milestones: originalMilestones } = getJSONFixture('api/releases/release.json');
+
describe('Release block milestone info', () => {
let wrapper;
let milestones;
@@ -35,7 +37,7 @@ describe('Release block milestone info', () => {
beforeEach(() => factory({ milestones }));
it('renders the correct percentage', () => {
- expect(milestoneProgressBarContainer().text()).toContain('41% complete');
+ expect(milestoneProgressBarContainer().text()).toContain('44% complete');
});
it('renders a progress bar that displays the correct percentage', () => {
@@ -44,14 +46,24 @@ describe('Release block milestone info', () => {
expect(progressBar.exists()).toBe(true);
expect(progressBar.attributes()).toEqual(
expect.objectContaining({
- value: '22',
- max: '54',
+ value: '4',
+ max: '9',
}),
);
});
it('renders a list of links to all associated milestones', () => {
- expect(trimText(milestoneListContainer().text())).toContain('Milestones 13.6 • 13.5');
+ // The API currently returns the milestones in a non-deterministic order,
+ // which causes the frontend fixture used by this test to return the
+ // milestones in one order locally and a different order in the CI pipeline.
+ // This is a bug and is tracked here: https://gitlab.com/gitlab-org/gitlab/-/issues/259012
+ // When this bug is fixed this expectation should be updated to
+ // assert the expected order.
+ const containerText = trimText(milestoneListContainer().text());
+ expect(
+ containerText.includes('Milestones 12.4 • 12.3') ||
+ containerText.includes('Milestones 12.3 • 12.4'),
+ ).toBe(true);
milestones.forEach((m, i) => {
const milestoneLink = milestoneListContainer()
@@ -65,7 +77,7 @@ describe('Release block milestone info', () => {
});
it('renders the "Issues" section with a total count of issues associated to the milestone(s)', () => {
- const totalIssueCount = 54;
+ const totalIssueCount = 9;
const issuesContainerText = trimText(issuesContainer().text());
expect(issuesContainerText).toContain(`Issues ${totalIssueCount}`);
@@ -73,7 +85,7 @@ describe('Release block milestone info', () => {
const badge = issuesContainer().find(GlBadge);
expect(badge.text()).toBe(totalIssueCount.toString());
- expect(issuesContainerText).toContain('Open: 32 • Closed: 22');
+ expect(issuesContainerText).toContain('Open: 5 • Closed: 4');
});
});
diff --git a/spec/frontend/releases/components/release_block_spec.js b/spec/frontend/releases/components/release_block_spec.js
index a7f1388664b..af5e538b95e 100644
--- a/spec/frontend/releases/components/release_block_spec.js
+++ b/spec/frontend/releases/components/release_block_spec.js
@@ -1,15 +1,17 @@
import $ from 'jquery';
import { mount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
+import { getJSONFixture } from 'helpers/fixtures';
import EvidenceBlock from '~/releases/components/evidence_block.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import timeagoMixin from '~/vue_shared/mixins/timeago';
-import { release as originalRelease } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { BACK_URL_PARAM } from '~/releases/constants';
import * as urlUtility from '~/lib/utils/url_utility';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release block', () => {
let wrapper;
let release;
@@ -46,7 +48,7 @@ describe('Release block', () => {
beforeEach(() => factory(release));
it("renders the block with an id equal to the release's tag name", () => {
- expect(wrapper.attributes().id).toBe('v0.3');
+ expect(wrapper.attributes().id).toBe(release.tagName);
});
it(`renders an edit button that links to the "Edit release" page with a "${BACK_URL_PARAM}" parameter`, () => {
@@ -107,7 +109,7 @@ describe('Release block', () => {
});
it('does not render external label when link is not external', () => {
- expect(wrapper.find('.js-assets-list li:nth-child(2) a').text()).not.toContain(
+ expect(wrapper.find('.js-assets-list li:nth-child(3) a').text()).not.toContain(
'external source',
);
});
diff --git a/spec/frontend/releases/components/release_skeleton_loader_spec.js b/spec/frontend/releases/components/release_skeleton_loader_spec.js
new file mode 100644
index 00000000000..7fbf864568a
--- /dev/null
+++ b/spec/frontend/releases/components/release_skeleton_loader_spec.js
@@ -0,0 +1,15 @@
+import { mount } from '@vue/test-utils';
+import { GlSkeletonLoader } from '@gitlab/ui';
+import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
+
+describe('release_skeleton_loader.vue', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = mount(ReleaseSkeletonLoader);
+ });
+
+ it('renders a GlSkeletonLoader', () => {
+ expect(wrapper.find(GlSkeletonLoader).exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/releases/components/releases_pagination_graphql_spec.js b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
index b01a28eb6c3..bba5e532e5e 100644
--- a/spec/frontend/releases/components/releases_pagination_graphql_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
@@ -29,7 +29,7 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
listModule.state.graphQlPageInfo = pageInfo;
- listModule.actions.fetchReleasesGraphQl = jest.fn();
+ listModule.actions.fetchReleases = jest.fn();
wrapper = mount(ReleasesPaginationGraphql, {
store: createStore({
@@ -141,8 +141,8 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
findNextButton().trigger('click');
});
- it('calls fetchReleasesGraphQl with the correct after cursor', () => {
- expect(listModule.actions.fetchReleasesGraphQl.mock.calls).toEqual([
+ it('calls fetchReleases with the correct after cursor', () => {
+ expect(listModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { after: cursors.endCursor }],
]);
});
@@ -159,8 +159,8 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
findPrevButton().trigger('click');
});
- it('calls fetchReleasesGraphQl with the correct before cursor', () => {
- expect(listModule.actions.fetchReleasesGraphQl.mock.calls).toEqual([
+ it('calls fetchReleases with the correct before cursor', () => {
+ expect(listModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { before: cursors.startCursor }],
]);
});
diff --git a/spec/frontend/releases/components/releases_pagination_rest_spec.js b/spec/frontend/releases/components/releases_pagination_rest_spec.js
index 4fd3e085fc9..59c0c31413a 100644
--- a/spec/frontend/releases/components/releases_pagination_rest_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_rest_spec.js
@@ -20,9 +20,9 @@ describe('~/releases/components/releases_pagination_rest.vue', () => {
const createComponent = pageInfo => {
listModule = createListModule({ projectId });
- listModule.state.pageInfo = pageInfo;
+ listModule.state.restPageInfo = pageInfo;
- listModule.actions.fetchReleasesRest = jest.fn();
+ listModule.actions.fetchReleases = jest.fn();
wrapper = mount(ReleasesPaginationRest, {
store: createStore({
@@ -57,8 +57,8 @@ describe('~/releases/components/releases_pagination_rest.vue', () => {
findGlPagination().vm.$emit('input', newPage);
});
- it('calls fetchReleasesRest with the correct page', () => {
- expect(listModule.actions.fetchReleasesRest.mock.calls).toEqual([
+ it('calls fetchReleases with the correct page', () => {
+ expect(listModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { page: newPage }],
]);
});
diff --git a/spec/frontend/releases/mock_data.js b/spec/frontend/releases/mock_data.js
index 58cd69a2f6a..c89182faa44 100644
--- a/spec/frontend/releases/mock_data.js
+++ b/spec/frontend/releases/mock_data.js
@@ -1,139 +1,3 @@
-import { ASSET_LINK_TYPE } from '~/releases/constants';
-
-export const milestones = [
- {
- id: 50,
- iid: 2,
- project_id: 18,
- title: '13.6',
- description: 'The 13.6 milestone!',
- state: 'active',
- created_at: '2019-08-27T17:22:38.280Z',
- updated_at: '2019-08-27T17:22:38.280Z',
- due_date: '2019-09-19',
- start_date: '2019-08-31',
- web_url: 'http://0.0.0.0:3001/root/release-test/-/milestones/2',
- issue_stats: {
- total: 33,
- closed: 19,
- },
- },
- {
- id: 49,
- iid: 1,
- project_id: 18,
- title: '13.5',
- description: 'The 13.5 milestone!',
- state: 'active',
- created_at: '2019-08-26T17:55:48.643Z',
- updated_at: '2019-08-26T17:55:48.643Z',
- due_date: '2019-10-11',
- start_date: '2019-08-19',
- web_url: 'http://0.0.0.0:3001/root/release-test/-/milestones/1',
- issue_stats: {
- total: 21,
- closed: 3,
- },
- },
-];
-
-export const release = {
- name: 'New release',
- tag_name: 'v0.3',
- tag_path: '/root/release-test/-/tags/v0.3',
- description: 'A super nice release!',
- description_html: '<p data-sourcepos="1:1-1:21" dir="auto">A super nice release!</p>',
- created_at: '2019-08-26T17:54:04.952Z',
- released_at: '2019-08-26T17:54:04.807Z',
- author: {
- id: 1,
- name: 'Administrator',
- username: 'root',
- state: 'active',
- avatar_url: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- web_url: 'http://0.0.0.0:3001/root',
- },
- commit: {
- id: 'c22b0728d1b465f82898c884d32b01aa642f96c1',
- short_id: 'c22b0728',
- created_at: '2019-08-26T17:47:07.000Z',
- parent_ids: [],
- title: 'Initial commit',
- message: 'Initial commit',
- author_name: 'Administrator',
- author_email: 'admin@example.com',
- authored_date: '2019-08-26T17:47:07.000Z',
- committer_name: 'Administrator',
- committer_email: 'admin@example.com',
- committed_date: '2019-08-26T17:47:07.000Z',
- },
- commit_path: '/root/release-test/commit/c22b0728d1b465f82898c884d32b01aa642f96c1',
- upcoming_release: false,
- milestones,
- evidences: [
- {
- filepath:
- 'https://20592.qa-tunnel.gitlab.info/root/test-deployments/-/releases/v1.1.2/evidences/1.json',
- sha: 'fb3a125fd69a0e5048ebfb0ba43eb32ce4911520dd8d',
- collected_at: '2018-10-19 15:43:20 +0200',
- },
- {
- filepath:
- 'https://20592.qa-tunnel.gitlab.info/root/test-deployments/-/releases/v1.1.2/evidences/2.json',
- sha: '6ebd17a66e6a861175735416e49cf677678029805712dd71bb805c609e2d9108',
- collected_at: '2018-10-19 15:43:20 +0200',
- },
- {
- filepath:
- 'https://20592.qa-tunnel.gitlab.info/root/test-deployments/-/releases/v1.1.2/evidences/3.json',
- sha: '2f65beaf275c3cb4b4e24fb01d481cc475d69c957830833f15338384816b5cba',
- collected_at: '2018-10-19 15:43:20 +0200',
- },
- ],
- assets: {
- count: 5,
- sources: [
- {
- format: 'zip',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.zip',
- },
- {
- format: 'tar.gz',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar.gz',
- },
- {
- format: 'tar.bz2',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar.bz2',
- },
- {
- format: 'tar',
- url: 'http://0.0.0.0:3001/root/release-test/-/archive/v0.3/release-test-v0.3.tar',
- },
- ],
- links: [
- {
- id: 1,
- name: 'my link',
- url: 'https://google.com',
- direct_asset_url: 'https://redirected.google.com',
- external: true,
- },
- {
- id: 2,
- name: 'my second link',
- url:
- 'https://gitlab.com/gitlab-org/gitlab-foss/-/jobs/artifacts/v11.6.0-rc4/download?job=rspec-mysql+41%2F50',
- direct_asset_url: 'https://redirected.google.com',
- external: false,
- },
- ],
- },
- _links: {
- self: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3',
- edit_url: 'http://0.0.0.0:3001/root/release-test/-/releases/v0.3/edit',
- },
-};
-
export const pageInfoHeadersWithoutPagination = {
'X-NEXT-PAGE': '',
'X-PAGE': '1',
@@ -151,202 +15,3 @@ export const pageInfoHeadersWithPagination = {
'X-TOTAL': '21',
'X-TOTAL-PAGES': '2',
};
-
-export const assets = {
- count: 5,
- sources: [
- {
- format: 'zip',
- url: 'https://example.gitlab.com/path/to/zip',
- },
- ],
- links: [
- {
- linkType: ASSET_LINK_TYPE.IMAGE,
- url: 'https://example.gitlab.com/path/to/image',
- directAssetUrl: 'https://example.gitlab.com/path/to/image',
- name: 'Example image link',
- },
- {
- linkType: ASSET_LINK_TYPE.PACKAGE,
- url: 'https://example.gitlab.com/path/to/package',
- directAssetUrl: 'https://example.gitlab.com/path/to/package',
- name: 'Example package link',
- },
- {
- linkType: ASSET_LINK_TYPE.RUNBOOK,
- url: 'https://example.gitlab.com/path/to/runbook',
- directAssetUrl: 'https://example.gitlab.com/path/to/runbook',
- name: 'Example runbook link',
- },
- {
- linkType: ASSET_LINK_TYPE.OTHER,
- url: 'https://example.gitlab.com/path/to/link',
- directAssetUrl: 'https://example.gitlab.com/path/to/link',
- name: 'Example link',
- },
- ],
-};
-
-export const release2 = {
- name: 'Bionic Beaver',
- tag_name: '18.04',
- description: '## changelog\n\n* line 1\n* line2',
- description_html: '<div><h2>changelog</h2><ul><li>line1</li<li>line 2</li></ul></div>',
- author_name: 'Release bot',
- author_email: 'release-bot@example.com',
- created_at: '2012-05-28T05:00:00-07:00',
- commit: {
- id: '2695effb5807a22ff3d138d593fd856244e155e7',
- short_id: '2695effb',
- title: 'Initial commit',
- created_at: '2017-07-26T11:08:53.000+02:00',
- parent_ids: ['2a4b78934375d7f53875269ffd4f45fd83a84ebe'],
- message: 'Initial commit',
- author: {
- avatar_url: 'uploads/-/system/user/avatar/johndoe/avatar.png',
- id: 482476,
- name: 'John Doe',
- path: '/johndoe',
- state: 'active',
- status_tooltip_html: null,
- username: 'johndoe',
- web_url: 'https://gitlab.com/johndoe',
- },
- authored_date: '2012-05-28T04:42:42-07:00',
- committer_name: 'Jack Smith',
- committer_email: 'jack@example.com',
- committed_date: '2012-05-28T04:42:42-07:00',
- },
- assets,
-};
-
-export const releases = [release, release2];
-
-export const graphqlReleasesResponse = {
- data: {
- project: {
- releases: {
- count: 39,
- nodes: [
- {
- name: 'Release 1.0',
- tagName: 'v5.10',
- tagPath: '/root/release-test/-/tags/v5.10',
- descriptionHtml:
- '<p data-sourcepos="1:1-1:24" dir="auto">This is version <strong>1.0</strong>!</p>',
- releasedAt: '2020-08-21T20:15:18Z',
- upcomingRelease: false,
- assets: {
- count: 7,
- sources: {
- nodes: [
- {
- format: 'zip',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.zip',
- },
- {
- format: 'tar.gz',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.gz',
- },
- {
- format: 'tar.bz2',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar.bz2',
- },
- {
- format: 'tar',
- url:
- 'http://0.0.0.0:3000/root/release-test/-/archive/v5.10/release-test-v5.10.tar',
- },
- ],
- },
- links: {
- nodes: [
- {
- id: 'gid://gitlab/Releases::Link/69',
- name: 'An example link',
- url: 'https://example.com/link',
- directAssetUrl:
- 'http://0.0.0.0:3000/root/release-test/-/releases/v5.32/permanent/path/to/runbook',
- linkType: 'OTHER',
- external: true,
- },
- {
- id: 'gid://gitlab/Releases::Link/68',
- name: 'An example package link',
- url: 'https://example.com/package',
- directAssetUrl: 'https://example.com/package',
- linkType: 'PACKAGE',
- external: true,
- },
- {
- id: 'gid://gitlab/Releases::Link/67',
- name: 'An example image',
- url: 'https://example.com/image',
- directAssetUrl: 'https://example.com/image',
- linkType: 'IMAGE',
- external: true,
- },
- ],
- },
- },
- evidences: {
- nodes: [
- {
- filepath:
- 'http://0.0.0.0:3000/root/release-test/-/releases/v5.10/evidences/34.json',
- collectedAt: '2020-08-21T20:15:19Z',
- sha: '22bde8e8b93d870a29ddc339287a1fbb598f45d1396d',
- },
- ],
- },
- links: {
- editUrl: 'http://0.0.0.0:3000/root/release-test/-/releases/v5.10/edit',
- issuesUrl: null,
- mergeRequestsUrl: null,
- selfUrl: 'http://0.0.0.0:3000/root/release-test/-/releases/v5.10',
- },
- commit: {
- sha: '92e7ea2ee4496fe0d00ff69830ba0564d3d1e5a7',
- webUrl:
- 'http://0.0.0.0:3000/root/release-test/-/commit/92e7ea2ee4496fe0d00ff69830ba0564d3d1e5a7',
- title: 'Testing a change.',
- },
- author: {
- webUrl: 'http://0.0.0.0:3000/root',
- avatarUrl: '/uploads/-/system/user/avatar/1/avatar.png',
- username: 'root',
- },
- milestones: {
- nodes: [
- {
- id: 'gid://gitlab/Milestone/60',
- title: '12.4',
- description: '',
- webPath: '/root/release-test/-/milestones/2',
- stats: {
- totalIssuesCount: 0,
- closedIssuesCount: 0,
- },
- },
- {
- id: 'gid://gitlab/Milestone/59',
- title: '12.3',
- description: 'Milestone 12.3',
- webPath: '/root/release-test/-/milestones/1',
- stats: {
- totalIssuesCount: 2,
- closedIssuesCount: 1,
- },
- },
- ],
- },
- },
- ],
- },
- },
- },
-};
diff --git a/spec/frontend/releases/stores/getters_spec.js b/spec/frontend/releases/stores/getters_spec.js
new file mode 100644
index 00000000000..01e10567cf0
--- /dev/null
+++ b/spec/frontend/releases/stores/getters_spec.js
@@ -0,0 +1,22 @@
+import * as getters from '~/releases/stores/getters';
+
+describe('~/releases/stores/getters.js', () => {
+ it.each`
+ graphqlReleaseData | graphqlReleasesPage | graphqlMilestoneStats | result
+ ${false} | ${false} | ${false} | ${false}
+ ${false} | ${false} | ${true} | ${false}
+ ${false} | ${true} | ${false} | ${false}
+ ${false} | ${true} | ${true} | ${false}
+ ${true} | ${false} | ${false} | ${false}
+ ${true} | ${false} | ${true} | ${false}
+ ${true} | ${true} | ${false} | ${false}
+ ${true} | ${true} | ${true} | ${true}
+ `(
+ 'returns $result with feature flag values graphqlReleaseData=$graphqlReleaseData, graphqlReleasesPage=$graphqlReleasesPage, and graphqlMilestoneStats=$graphqlMilestoneStats',
+ ({ result: expectedResult, ...featureFlags }) => {
+ const actualResult = getters.useGraphQLEndpoint({ featureFlags });
+
+ expect(actualResult).toBe(expectedResult);
+ },
+ );
+});
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 1b2a705e8f4..955c761d35a 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,10 +1,10 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import { cloneDeep } from 'lodash';
import * as actions from '~/releases/stores/modules/detail/actions';
import * as types from '~/releases/stores/modules/detail/mutation_types';
-import { release as originalRelease } from '../../../mock_data';
import createState from '~/releases/stores/modules/detail/state';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
@@ -21,6 +21,8 @@ jest.mock('~/lib/utils/url_utility', () => ({
joinPaths: jest.requireActual('~/lib/utils/url_utility').joinPaths,
}));
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release detail actions', () => {
let state;
let release;
@@ -207,6 +209,15 @@ describe('Release detail actions', () => {
});
});
+ describe('updateReleaseGroupMilestones', () => {
+ it(`commits ${types.UPDATE_RELEASE_GROUP_MILESTONES} with the updated release group milestones`, () => {
+ const newReleaseGroupMilestones = ['v0.0', 'v0.1'];
+ return testAction(actions.updateReleaseGroupMilestones, newReleaseGroupMilestones, state, [
+ { type: types.UPDATE_RELEASE_GROUP_MILESTONES, payload: newReleaseGroupMilestones },
+ ]);
+ });
+ });
+
describe('addEmptyAssetLink', () => {
it(`commits ${types.ADD_EMPTY_ASSET_LINK}`, () => {
return testAction(actions.addEmptyAssetLink, undefined, state, [
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index cd7c6b7d275..f3e84262754 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -1,10 +1,12 @@
+import { getJSONFixture } from 'helpers/fixtures';
import createState from '~/releases/stores/modules/detail/state';
import mutations from '~/releases/stores/modules/detail/mutations';
import * as types from '~/releases/stores/modules/detail/mutation_types';
-import { release as originalRelease } from '../../../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
+const originalRelease = getJSONFixture('api/releases/release.json');
+
describe('Release detail mutations', () => {
let state;
let release;
@@ -30,6 +32,7 @@ describe('Release detail mutations', () => {
name: '',
description: '',
milestones: [],
+ groupMilestones: [],
assets: {
links: [],
},
@@ -112,6 +115,26 @@ describe('Release detail mutations', () => {
});
});
+ describe(`${types.UPDATE_RELEASE_MILESTONES}`, () => {
+ it("updates the release's milestones", () => {
+ state.release = release;
+ const newReleaseMilestones = ['v0.0', 'v0.1'];
+ mutations[types.UPDATE_RELEASE_MILESTONES](state, newReleaseMilestones);
+
+ expect(state.release.milestones).toBe(newReleaseMilestones);
+ });
+ });
+
+ describe(`${types.UPDATE_RELEASE_GROUP_MILESTONES}`, () => {
+ it("updates the release's group milestones", () => {
+ state.release = release;
+ const newReleaseGroupMilestones = ['v0.0', 'v0.1'];
+ mutations[types.UPDATE_RELEASE_GROUP_MILESTONES](state, newReleaseGroupMilestones);
+
+ expect(state.release.groupMilestones).toBe(newReleaseGroupMilestones);
+ });
+ });
+
describe(`${types.REQUEST_SAVE_RELEASE}`, () => {
it('sets state.isUpdatingRelease to true', () => {
mutations[types.REQUEST_SAVE_RELEASE](state);
diff --git a/spec/frontend/releases/stores/modules/list/actions_spec.js b/spec/frontend/releases/stores/modules/list/actions_spec.js
index 95e30659d6c..2068d7fee78 100644
--- a/spec/frontend/releases/stores/modules/list/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/list/actions_spec.js
@@ -1,31 +1,42 @@
import { cloneDeep } from 'lodash';
import testAction from 'helpers/vuex_action_helper';
+import { getJSONFixture } from 'helpers/fixtures';
import {
- requestReleases,
fetchReleases,
- receiveReleasesSuccess,
+ fetchReleasesGraphQl,
+ fetchReleasesRest,
receiveReleasesError,
} from '~/releases/stores/modules/list/actions';
import createState from '~/releases/stores/modules/list/state';
import * as types from '~/releases/stores/modules/list/mutation_types';
import api from '~/api';
import { gqClient, convertGraphQLResponse } from '~/releases/util';
-import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import {
- pageInfoHeadersWithoutPagination,
- releases as originalReleases,
- graphqlReleasesResponse as originalGraphqlReleasesResponse,
-} from '../../../mock_data';
+ normalizeHeaders,
+ parseIntPagination,
+ convertObjectPropsToCamelCase,
+} from '~/lib/utils/common_utils';
+import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
import allReleasesQuery from '~/releases/queries/all_releases.query.graphql';
+import { PAGE_SIZE } from '~/releases/constants';
+
+const originalRelease = getJSONFixture('api/releases/release.json');
+const originalReleases = [originalRelease];
+
+const originalGraphqlReleasesResponse = getJSONFixture(
+ 'graphql/releases/queries/all_releases.query.graphql.json',
+);
describe('Releases State actions', () => {
let mockedState;
- let pageInfo;
let releases;
let graphqlReleasesResponse;
const projectPath = 'root/test-project';
const projectId = 19;
+ const before = 'testBeforeCursor';
+ const after = 'testAfterCursor';
+ const page = 2;
beforeEach(() => {
mockedState = {
@@ -33,178 +44,261 @@ describe('Releases State actions', () => {
projectId,
projectPath,
}),
- featureFlags: {
- graphqlReleaseData: true,
- graphqlReleasesPage: true,
- graphqlMilestoneStats: true,
- },
};
- pageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
graphqlReleasesResponse = cloneDeep(originalGraphqlReleasesResponse);
});
- describe('requestReleases', () => {
- it('should commit REQUEST_RELEASES mutation', done => {
- testAction(requestReleases, null, mockedState, [{ type: types.REQUEST_RELEASES }], [], done);
+ describe('when all the necessary GraphQL feature flags are enabled', () => {
+ beforeEach(() => {
+ mockedState.useGraphQLEndpoint = true;
+ });
+
+ describe('fetchReleases', () => {
+ it('dispatches fetchReleasesGraphQl with before and after parameters', () => {
+ return testAction(
+ fetchReleases,
+ { before, after, page },
+ mockedState,
+ [],
+ [
+ {
+ type: 'fetchReleasesGraphQl',
+ payload: { before, after },
+ },
+ ],
+ );
+ });
});
});
- describe('fetchReleases', () => {
- describe('success', () => {
- it('dispatches requestReleases and receiveReleasesSuccess', done => {
- jest.spyOn(gqClient, 'query').mockImplementation(({ query, variables }) => {
- expect(query).toBe(allReleasesQuery);
- expect(variables).toEqual({
- fullPath: projectPath,
+ describe('when at least one of the GraphQL feature flags is disabled', () => {
+ beforeEach(() => {
+ mockedState.useGraphQLEndpoint = false;
+ });
+
+ describe('fetchReleases', () => {
+ it('dispatches fetchReleasesRest with a page parameter', () => {
+ return testAction(
+ fetchReleases,
+ { before, after, page },
+ mockedState,
+ [],
+ [
+ {
+ type: 'fetchReleasesRest',
+ payload: { page },
+ },
+ ],
+ );
+ });
+ });
+ });
+
+ describe('fetchReleasesGraphQl', () => {
+ describe('GraphQL query variables', () => {
+ let vuexParams;
+
+ beforeEach(() => {
+ jest.spyOn(gqClient, 'query');
+
+ vuexParams = { dispatch: jest.fn(), commit: jest.fn(), state: mockedState };
+ });
+
+ describe('when neither a before nor an after parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesGraphQl(vuexParams, { before: undefined, after: undefined });
+ });
+
+ it('makes a GraphQl query with a first variable', () => {
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: allReleasesQuery,
+ variables: { fullPath: projectPath, first: PAGE_SIZE },
});
- return Promise.resolve(graphqlReleasesResponse);
});
+ });
- testAction(
- fetchReleases,
+ describe('when only a before parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesGraphQl(vuexParams, { before, after: undefined });
+ });
+
+ it('makes a GraphQl query with last and before variables', () => {
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: allReleasesQuery,
+ variables: { fullPath: projectPath, last: PAGE_SIZE, before },
+ });
+ });
+ });
+
+ describe('when only an after parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesGraphQl(vuexParams, { before: undefined, after });
+ });
+
+ it('makes a GraphQl query with first and after variables', () => {
+ expect(gqClient.query).toHaveBeenCalledWith({
+ query: allReleasesQuery,
+ variables: { fullPath: projectPath, first: PAGE_SIZE, after },
+ });
+ });
+ });
+
+ describe('when both before and after parameters are provided', () => {
+ it('throws an error', () => {
+ const callFetchReleasesGraphQl = () => {
+ fetchReleasesGraphQl(vuexParams, { before, after });
+ };
+
+ expect(callFetchReleasesGraphQl).toThrowError(
+ 'Both a `before` and an `after` parameter were provided to fetchReleasesGraphQl. These parameters cannot be used together.',
+ );
+ });
+ });
+ });
+
+ describe('when the request is successful', () => {
+ beforeEach(() => {
+ jest.spyOn(gqClient, 'query').mockResolvedValue(graphqlReleasesResponse);
+ });
+
+ it(`commits ${types.REQUEST_RELEASES} and ${types.RECEIVE_RELEASES_SUCCESS}`, () => {
+ const convertedResponse = convertGraphQLResponse(graphqlReleasesResponse);
+
+ return testAction(
+ fetchReleasesGraphQl,
{},
mockedState,
- [],
[
{
- type: 'requestReleases',
+ type: types.REQUEST_RELEASES,
},
{
- payload: convertGraphQLResponse(graphqlReleasesResponse),
- type: 'receiveReleasesSuccess',
+ type: types.RECEIVE_RELEASES_SUCCESS,
+ payload: {
+ data: convertedResponse.data,
+ graphQlPageInfo: convertedResponse.paginationInfo,
+ },
},
],
- done,
+ [],
);
});
});
- describe('error', () => {
- it('dispatches requestReleases and receiveReleasesError', done => {
- jest.spyOn(gqClient, 'query').mockRejectedValue();
+ describe('when the request fails', () => {
+ beforeEach(() => {
+ jest.spyOn(gqClient, 'query').mockRejectedValue(new Error('Something went wrong!'));
+ });
- testAction(
- fetchReleases,
+ it(`commits ${types.REQUEST_RELEASES} and dispatches receiveReleasesError`, () => {
+ return testAction(
+ fetchReleasesGraphQl,
{},
mockedState,
- [],
[
{
- type: 'requestReleases',
+ type: types.REQUEST_RELEASES,
},
+ ],
+ [
{
type: 'receiveReleasesError',
},
],
- done,
);
});
});
+ });
+
+ describe('fetchReleasesRest', () => {
+ describe('REST query parameters', () => {
+ let vuexParams;
- describe('when the graphqlReleaseData feature flag is disabled', () => {
beforeEach(() => {
- mockedState.featureFlags.graphqlReleasesPage = false;
- });
+ jest
+ .spyOn(api, 'releases')
+ .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
- describe('success', () => {
- it('dispatches requestReleases and receiveReleasesSuccess', done => {
- jest.spyOn(api, 'releases').mockImplementation((id, options) => {
- expect(id).toBe(projectId);
- expect(options.page).toBe('1');
- return Promise.resolve({ data: releases, headers: pageInfoHeadersWithoutPagination });
- });
+ vuexParams = { dispatch: jest.fn(), commit: jest.fn(), state: mockedState };
+ });
- testAction(
- fetchReleases,
- {},
- mockedState,
- [],
- [
- {
- type: 'requestReleases',
- },
- {
- payload: { data: releases, headers: pageInfoHeadersWithoutPagination },
- type: 'receiveReleasesSuccess',
- },
- ],
- done,
- );
+ describe('when a page parameter is provided', () => {
+ beforeEach(() => {
+ fetchReleasesRest(vuexParams, { page: 2 });
});
- it('dispatches requestReleases and receiveReleasesSuccess on page two', done => {
- jest.spyOn(api, 'releases').mockImplementation((_, options) => {
- expect(options.page).toBe('2');
- return Promise.resolve({ data: releases, headers: pageInfoHeadersWithoutPagination });
- });
-
- testAction(
- fetchReleases,
- { page: '2' },
- mockedState,
- [],
- [
- {
- type: 'requestReleases',
- },
- {
- payload: { data: releases, headers: pageInfoHeadersWithoutPagination },
- type: 'receiveReleasesSuccess',
- },
- ],
- done,
- );
+ it('makes a REST query with a page query parameter', () => {
+ expect(api.releases).toHaveBeenCalledWith(projectId, { page });
});
});
+ });
- describe('error', () => {
- it('dispatches requestReleases and receiveReleasesError', done => {
- jest.spyOn(api, 'releases').mockReturnValue(Promise.reject());
+ describe('when the request is successful', () => {
+ beforeEach(() => {
+ jest
+ .spyOn(api, 'releases')
+ .mockResolvedValue({ data: releases, headers: pageInfoHeadersWithoutPagination });
+ });
- testAction(
- fetchReleases,
- {},
- mockedState,
- [],
- [
- {
- type: 'requestReleases',
- },
- {
- type: 'receiveReleasesError',
+ it(`commits ${types.REQUEST_RELEASES} and ${types.RECEIVE_RELEASES_SUCCESS}`, () => {
+ return testAction(
+ fetchReleasesRest,
+ {},
+ mockedState,
+ [
+ {
+ type: types.REQUEST_RELEASES,
+ },
+ {
+ type: types.RECEIVE_RELEASES_SUCCESS,
+ payload: {
+ data: convertObjectPropsToCamelCase(releases, { deep: true }),
+ restPageInfo: parseIntPagination(
+ normalizeHeaders(pageInfoHeadersWithoutPagination),
+ ),
},
- ],
- done,
- );
- });
+ },
+ ],
+ [],
+ );
});
});
- });
- describe('receiveReleasesSuccess', () => {
- it('should commit RECEIVE_RELEASES_SUCCESS mutation', done => {
- testAction(
- receiveReleasesSuccess,
- { data: releases, headers: pageInfoHeadersWithoutPagination },
- mockedState,
- [{ type: types.RECEIVE_RELEASES_SUCCESS, payload: { pageInfo, data: releases } }],
- [],
- done,
- );
+ describe('when the request fails', () => {
+ beforeEach(() => {
+ jest.spyOn(api, 'releases').mockRejectedValue(new Error('Something went wrong!'));
+ });
+
+ it(`commits ${types.REQUEST_RELEASES} and dispatches receiveReleasesError`, () => {
+ return testAction(
+ fetchReleasesRest,
+ {},
+ mockedState,
+ [
+ {
+ type: types.REQUEST_RELEASES,
+ },
+ ],
+ [
+ {
+ type: 'receiveReleasesError',
+ },
+ ],
+ );
+ });
});
});
describe('receiveReleasesError', () => {
- it('should commit RECEIVE_RELEASES_ERROR mutation', done => {
- testAction(
+ it('should commit RECEIVE_RELEASES_ERROR mutation', () => {
+ return testAction(
receiveReleasesError,
null,
mockedState,
[{ type: types.RECEIVE_RELEASES_ERROR }],
[],
- done,
);
});
});
diff --git a/spec/frontend/releases/stores/modules/list/mutations_spec.js b/spec/frontend/releases/stores/modules/list/mutations_spec.js
index 27ad05846e7..914f69ec194 100644
--- a/spec/frontend/releases/stores/modules/list/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/list/mutations_spec.js
@@ -1,16 +1,29 @@
+import { getJSONFixture } from 'helpers/fixtures';
import createState from '~/releases/stores/modules/list/state';
import mutations from '~/releases/stores/modules/list/mutations';
import * as types from '~/releases/stores/modules/list/mutation_types';
-import { parseIntPagination } from '~/lib/utils/common_utils';
-import { pageInfoHeadersWithoutPagination, releases } from '../../../mock_data';
+import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
+import { convertGraphQLResponse } from '~/releases/util';
+
+const originalRelease = getJSONFixture('api/releases/release.json');
+const originalReleases = [originalRelease];
+
+const graphqlReleasesResponse = getJSONFixture(
+ 'graphql/releases/queries/all_releases.query.graphql.json',
+);
describe('Releases Store Mutations', () => {
let stateCopy;
- let pageInfo;
+ let restPageInfo;
+ let graphQlPageInfo;
+ let releases;
beforeEach(() => {
stateCopy = createState({});
- pageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
+ restPageInfo = parseIntPagination(pageInfoHeadersWithoutPagination);
+ graphQlPageInfo = convertGraphQLResponse(graphqlReleasesResponse).paginationInfo;
+ releases = convertObjectPropsToCamelCase(originalReleases, { deep: true });
});
describe('REQUEST_RELEASES', () => {
@@ -23,7 +36,11 @@ describe('Releases Store Mutations', () => {
describe('RECEIVE_RELEASES_SUCCESS', () => {
beforeEach(() => {
- mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, { pageInfo, data: releases });
+ mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, {
+ restPageInfo,
+ graphQlPageInfo,
+ data: releases,
+ });
});
it('sets is loading to false', () => {
@@ -38,18 +55,29 @@ describe('Releases Store Mutations', () => {
expect(stateCopy.releases).toEqual(releases);
});
- it('sets pageInfo', () => {
- expect(stateCopy.pageInfo).toEqual(pageInfo);
+ it('sets restPageInfo', () => {
+ expect(stateCopy.restPageInfo).toEqual(restPageInfo);
+ });
+
+ it('sets graphQlPageInfo', () => {
+ expect(stateCopy.graphQlPageInfo).toEqual(graphQlPageInfo);
});
});
describe('RECEIVE_RELEASES_ERROR', () => {
it('resets data', () => {
+ mutations[types.RECEIVE_RELEASES_SUCCESS](stateCopy, {
+ restPageInfo,
+ graphQlPageInfo,
+ data: releases,
+ });
+
mutations[types.RECEIVE_RELEASES_ERROR](stateCopy);
expect(stateCopy.isLoading).toEqual(false);
expect(stateCopy.releases).toEqual([]);
- expect(stateCopy.pageInfo).toEqual({});
+ expect(stateCopy.restPageInfo).toEqual({});
+ expect(stateCopy.graphQlPageInfo).toEqual({});
});
});
});
diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js
index f40e5729188..a9d0b61695d 100644
--- a/spec/frontend/releases/util_spec.js
+++ b/spec/frontend/releases/util_spec.js
@@ -1,6 +1,10 @@
import { cloneDeep } from 'lodash';
+import { getJSONFixture } from 'helpers/fixtures';
import { releaseToApiJson, apiJsonToRelease, convertGraphQLResponse } from '~/releases/util';
-import { graphqlReleasesResponse as originalGraphqlReleasesResponse } from './mock_data';
+
+const originalGraphqlReleasesResponse = getJSONFixture(
+ 'graphql/releases/queries/all_releases.query.graphql.json',
+);
describe('releases/util.js', () => {
describe('releaseToApiJson', () => {
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index cf2e6b00800..aaa8bf168f2 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -77,24 +77,31 @@ exports[`Repository last commit component renders commit widget 1`] = `
</gl-link-stub>
</div>
- <div
- class="commit-sha-group d-flex"
+ <gl-button-group-stub
+ class="gl-ml-4 js-commit-sha-group"
>
- <div
- class="label label-monospace monospace"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-font-monospace"
+ data-testid="last-commit-id-label"
+ icon=""
+ label="true"
+ size="medium"
+ variant="default"
>
-
- 12345678
-
- </div>
+ 12345678
+ </gl-button-stub>
<clipboard-button-stub
- cssclass="btn-default"
+ category="secondary"
+ class="input-group-text"
+ size="medium"
text="123456789"
title="Copy commit SHA"
- tooltipplacement="bottom"
+ tooltipplacement="top"
/>
- </div>
+ </gl-button-group-stub>
</div>
</div>
</div>
@@ -181,24 +188,31 @@ exports[`Repository last commit component renders the signature HTML as returned
</gl-link-stub>
</div>
- <div
- class="commit-sha-group d-flex"
+ <gl-button-group-stub
+ class="gl-ml-4 js-commit-sha-group"
>
- <div
- class="label label-monospace monospace"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ class="gl-font-monospace"
+ data-testid="last-commit-id-label"
+ icon=""
+ label="true"
+ size="medium"
+ variant="default"
>
-
- 12345678
-
- </div>
+ 12345678
+ </gl-button-stub>
<clipboard-button-stub
- cssclass="btn-default"
+ category="secondary"
+ class="input-group-text"
+ size="medium"
text="123456789"
title="Copy commit SHA"
- tooltipplacement="bottom"
+ tooltipplacement="top"
/>
- </div>
+ </gl-button-group-stub>
</div>
</div>
</div>
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index c14a7f0e061..ccba0982c26 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -78,7 +78,7 @@ describe('Repository last commit component', () => {
factory();
return vm.vm.$nextTick(() => {
- expect(vm.find('.label-monospace').text()).toEqual('12345678');
+ expect(vm.find('[data-testid="last-commit-id-label"]').text()).toEqual('12345678');
});
});
diff --git a/spec/frontend/repository/log_tree_spec.js b/spec/frontend/repository/log_tree_spec.js
index 954424b5c8a..ddc95feccd6 100644
--- a/spec/frontend/repository/log_tree_spec.js
+++ b/spec/frontend/repository/log_tree_spec.js
@@ -84,6 +84,14 @@ describe('fetchLogsTree', () => {
expect(axios.get.mock.calls.length).toEqual(1);
}));
+ it('calls axios for each path', () =>
+ Promise.all([
+ fetchLogsTree(client, '', '0', resolver),
+ fetchLogsTree(client, '/test', '0', resolver),
+ ]).then(() => {
+ expect(axios.get.mock.calls.length).toEqual(2);
+ }));
+
it('calls entry resolver', () =>
fetchLogsTree(client, '', '0', resolver).then(() => {
expect(resolver.resolve).toHaveBeenCalledWith(
diff --git a/spec/frontend/right_sidebar_spec.js b/spec/frontend/right_sidebar_spec.js
index d80d80152a5..3490a99afb4 100644
--- a/spec/frontend/right_sidebar_spec.js
+++ b/spec/frontend/right_sidebar_spec.js
@@ -6,7 +6,9 @@ import Sidebar from '~/right_sidebar';
let $aside = null;
let $toggle = null;
-let $icon = null;
+let $toggleContainer = null;
+let $expandIcon = null;
+let $collapseIcon = null;
let $page = null;
let $labelsIcon = null;
@@ -15,10 +17,11 @@ const assertSidebarState = state => {
const shouldBeCollapsed = state === 'collapsed';
expect($aside.hasClass('right-sidebar-expanded')).toBe(shouldBeExpanded);
expect($page.hasClass('right-sidebar-expanded')).toBe(shouldBeExpanded);
- expect($icon.hasClass('fa-angle-double-right')).toBe(shouldBeExpanded);
+ expect($toggleContainer.data('is-expanded')).toBe(shouldBeExpanded);
+ expect($expandIcon.hasClass('hidden')).toBe(shouldBeExpanded);
expect($aside.hasClass('right-sidebar-collapsed')).toBe(shouldBeCollapsed);
expect($page.hasClass('right-sidebar-collapsed')).toBe(shouldBeCollapsed);
- expect($icon.hasClass('fa-angle-double-left')).toBe(shouldBeCollapsed);
+ expect($collapseIcon.hasClass('hidden')).toBe(shouldBeCollapsed);
};
describe('RightSidebar', () => {
@@ -33,7 +36,9 @@ describe('RightSidebar', () => {
new Sidebar(); // eslint-disable-line no-new
$aside = $('.right-sidebar');
$page = $('.layout-page');
- $icon = $aside.find('i');
+ $toggleContainer = $('.js-sidebar-toggle-container');
+ $expandIcon = $aside.find('.js-sidebar-expand');
+ $collapseIcon = $aside.find('.js-sidebar-collapse');
$toggle = $aside.find('.js-sidebar-toggle');
$labelsIcon = $aside.find('.sidebar-collapsed-icon');
});
diff --git a/spec/frontend/search/components/state_filter_spec.js b/spec/frontend/search/components/dropdown_filter_spec.js
index 26344f2b592..ffac038e1c5 100644
--- a/spec/frontend/search/components/state_filter_spec.js
+++ b/spec/frontend/search/components/dropdown_filter_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import StateFilter from '~/search/state_filter/components/state_filter.vue';
+import DropdownFilter from '~/search/components/dropdown_filter.vue';
import {
FILTER_STATES,
- SCOPES,
FILTER_STATES_BY_SCOPE,
- FILTER_TEXT,
+ FILTER_HEADER,
+ SCOPES,
} from '~/search/state_filter/constants';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -15,14 +15,19 @@ jest.mock('~/lib/utils/url_utility', () => ({
}));
function createComponent(props = { scope: 'issues' }) {
- return shallowMount(StateFilter, {
+ return shallowMount(DropdownFilter, {
propsData: {
+ filtersArray: FILTER_STATES_BY_SCOPE.issues,
+ filters: FILTER_STATES,
+ header: FILTER_HEADER,
+ param: 'state',
+ supportedScopes: Object.values(SCOPES),
...props,
},
});
}
-describe('StateFilter', () => {
+describe('DropdownFilter', () => {
let wrapper;
beforeEach(() => {
@@ -41,7 +46,7 @@ describe('StateFilter', () => {
describe('template', () => {
describe.each`
- scope | showStateDropdown
+ scope | showDropdown
${'issues'} | ${true}
${'merge_requests'} | ${true}
${'projects'} | ${false}
@@ -50,26 +55,25 @@ describe('StateFilter', () => {
${'notes'} | ${false}
${'wiki_blobs'} | ${false}
${'blobs'} | ${false}
- `(`state dropdown`, ({ scope, showStateDropdown }) => {
+ `(`dropdown`, ({ scope, showDropdown }) => {
beforeEach(() => {
wrapper = createComponent({ scope });
});
- it(`does${showStateDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
- expect(findGlDropdown().exists()).toBe(showStateDropdown);
+ it(`does${showDropdown ? '' : ' not'} render when scope is ${scope}`, () => {
+ expect(findGlDropdown().exists()).toBe(showDropdown);
});
});
describe.each`
- state | label
- ${FILTER_STATES.ANY.value} | ${FILTER_TEXT}
+ initialFilter | label
+ ${FILTER_STATES.ANY.value} | ${`Any ${FILTER_HEADER}`}
${FILTER_STATES.OPEN.value} | ${FILTER_STATES.OPEN.label}
${FILTER_STATES.CLOSED.value} | ${FILTER_STATES.CLOSED.label}
- ${FILTER_STATES.MERGED.value} | ${FILTER_STATES.MERGED.label}
- `(`filter text`, ({ state, label }) => {
- describe(`when state is ${state}`, () => {
+ `(`filter text`, ({ initialFilter, label }) => {
+ describe(`when initialFilter is ${initialFilter}`, () => {
beforeEach(() => {
- wrapper = createComponent({ scope: 'issues', state });
+ wrapper = createComponent({ scope: 'issues', initialFilter });
});
it(`sets dropdown label to ${label}`, () => {
diff --git a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
index f4ac2f57261..02d5ca6bdb3 100644
--- a/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
+++ b/spec/frontend/self_monitor/components/__snapshots__/self_monitor_form_spec.js.snap
@@ -15,13 +15,16 @@ exports[`self monitor component When the self monitor project has not been creat
</h4>
- <gl-deprecated-button-stub
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
class="js-settings-toggle"
- size="md"
- variant="secondary"
+ icon=""
+ size="medium"
+ variant="default"
>
Expand
- </gl-deprecated-button-stub>
+ </gl-button-stub>
<p
class="js-section-sub-header"
@@ -56,6 +59,7 @@ exports[`self monitor component When the self monitor project has not been creat
<gl-modal-stub
cancel-title="Cancel"
+ category="primary"
modalclass=""
modalid="delete-self-monitor-modal"
ok-title="Delete project"
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index ec5f7b0a394..618cc16cdf4 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
import { createStore } from '~/self_monitor/store';
@@ -42,7 +42,7 @@ describe('self monitor component', () => {
it('renders as an expand button by default', () => {
wrapper = shallowMount(SelfMonitor, { store });
- const button = wrapper.find(GlDeprecatedButton);
+ const button = wrapper.find(GlButton);
expect(button.text()).toBe('Expand');
});
diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
index 22689080063..6b3d65ff037 100644
--- a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
+++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
@@ -11,7 +11,7 @@ exports[`EmptyStateComponent should render content 1`] = `
<p>In order to start using functions as a service, you must first install Knative on your Kubernetes cluster. <gl-link-stub href=\\"/help\\">More information</gl-link-stub>
</p>
<div>
- <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" href=\\"/clusters\\">Install Knative</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" href=\\"/clusters\\">Install Knative</gl-button-stub>
<!---->
</div>
</div>
diff --git a/spec/frontend/serverless/components/missing_prometheus_spec.js b/spec/frontend/serverless/components/missing_prometheus_spec.js
index 9ca4a45dd5f..0bd2e96a068 100644
--- a/spec/frontend/serverless/components/missing_prometheus_spec.js
+++ b/spec/frontend/serverless/components/missing_prometheus_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/serverless/store';
import missingPrometheusComponent from '~/serverless/components/missing_prometheus.vue';
@@ -24,7 +24,7 @@ describe('missingPrometheusComponent', () => {
'Function invocation metrics require Prometheus to be installed first.',
);
- expect(wrapper.find(GlDeprecatedButton).attributes('variant')).toBe('success');
+ expect(wrapper.find(GlButton).attributes('variant')).toBe('success');
});
it('should render no prometheus data message', () => {
diff --git a/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js b/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
index 2f11c6a07c2..8c868205295 100644
--- a/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import EditFormButtons from '~/sidebar/components/confidential/edit_form_buttons.vue';
import eventHub from '~/sidebar/event_hub';
@@ -56,11 +55,11 @@ describe('Edit Form Buttons', () => {
});
it('disables the toggle button', () => {
- expect(findConfidentialToggle().attributes('disabled')).toBe('disabled');
+ expect(findConfidentialToggle().props('disabled')).toBe(true);
});
- it('finds the GlLoadingIcon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ it('sets loading on the toggle button', () => {
+ expect(findConfidentialToggle().props('loading')).toBe(true);
});
});
@@ -99,7 +98,7 @@ describe('Edit Form Buttons', () => {
describe('when succeeds', () => {
beforeEach(() => {
createComponent({ data: { isLoading: false }, props: { confidential: true } });
- findConfidentialToggle().trigger('click');
+ findConfidentialToggle().vm.$emit('click', new Event('click'));
});
it('dispatches the correct action', () => {
@@ -109,9 +108,9 @@ describe('Edit Form Buttons', () => {
});
});
- it('resets loading', () => {
+ it('resets loading on the toggle button', () => {
return waitForPromises().then(() => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(findConfidentialToggle().props('loading')).toBe(false);
});
});
@@ -135,7 +134,7 @@ describe('Edit Form Buttons', () => {
props: { confidential: true },
resolved: false,
});
- findConfidentialToggle().trigger('click');
+ findConfidentialToggle().vm.$emit('click', new Event('click'));
});
it('calls flash with the correct message', () => {
diff --git a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
index de1da3456f8..913646c8f8d 100644
--- a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
@@ -1,5 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import EditFormButtons from '~/sidebar/components/lock/edit_form_buttons.vue';
import eventHub from '~/sidebar/event_hub';
import { deprecatedCreateFlash as flash } from '~/flash';
@@ -22,7 +21,6 @@ describe('EditFormButtons', () => {
};
const findLockToggle = () => wrapper.find('[data-testid="lock-toggle"]');
- const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
const createComponent = ({ props = {}, data = {}, resolved = true }) => {
store = issuableType === ISSUABLE_TYPE_ISSUE ? createStore() : createMrStore();
@@ -33,7 +31,7 @@ describe('EditFormButtons', () => {
jest.spyOn(store, 'dispatch').mockRejectedValue();
}
- wrapper = shallowMount(EditFormButtons, {
+ wrapper = mount(EditFormButtons, {
store,
provide: {
fullPath: '',
@@ -78,8 +76,8 @@ describe('EditFormButtons', () => {
expect(findLockToggle().attributes('disabled')).toBe('disabled');
});
- it('displays the GlLoadingIcon', () => {
- expect(findGlLoadingIcon().exists()).toBe(true);
+ it('sets loading on the toggle button', () => {
+ expect(findLockToggle().props('loading')).toBe(true);
});
});
@@ -121,7 +119,7 @@ describe('EditFormButtons', () => {
it('resets loading', async () => {
await wrapper.vm.$nextTick().then(() => {
- expect(findGlLoadingIcon().exists()).toBe(false);
+ expect(findLockToggle().props('loading')).toBe(false);
});
});
@@ -156,7 +154,7 @@ describe('EditFormButtons', () => {
it('resets loading', async () => {
await wrapper.vm.$nextTick().then(() => {
- expect(findGlLoadingIcon().exists()).toBe(false);
+ expect(findLockToggle().props('loading')).toBe(false);
});
});
diff --git a/spec/frontend/sidebar/reviewer_title_spec.js b/spec/frontend/sidebar/reviewer_title_spec.js
new file mode 100644
index 00000000000..eae266688d5
--- /dev/null
+++ b/spec/frontend/sidebar/reviewer_title_spec.js
@@ -0,0 +1,116 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import Component from '~/sidebar/components/reviewers/reviewer_title.vue';
+
+describe('ReviewerTitle component', () => {
+ let wrapper;
+
+ const createComponent = props => {
+ return shallowMount(Component, {
+ propsData: {
+ numberOfReviewers: 0,
+ editable: false,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('reviewer title', () => {
+ it('renders reviewer', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 1,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.innerText.trim()).toEqual('Reviewer');
+ });
+
+ it('renders 2 reviewers', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 2,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.innerText.trim()).toEqual('2 Reviewers');
+ });
+ });
+
+ describe('gutter toggle', () => {
+ it('does not show toggle by default', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 2,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toBeNull();
+ });
+
+ it('shows toggle when showToggle is true', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 2,
+ editable: false,
+ showToggle: true,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.gutter-toggle')).toEqual(expect.any(Object));
+ });
+ });
+
+ it('does not render spinner by default', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: false,
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBeFalsy();
+ });
+
+ it('renders spinner when loading', () => {
+ wrapper = createComponent({
+ loading: true,
+ numberOfReviewers: 0,
+ editable: false,
+ });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBeTruthy();
+ });
+
+ it('does not render edit link when not editable', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: false,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.edit-link')).toBeNull();
+ });
+
+ it('renders edit link when editable', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: true,
+ });
+
+ expect(wrapper.vm.$el.querySelector('.edit-link')).not.toBeNull();
+ });
+
+ it('tracks the event when edit is clicked', () => {
+ wrapper = createComponent({
+ numberOfReviewers: 0,
+ editable: true,
+ });
+
+ const spy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ triggerEvent('.js-sidebar-dropdown-toggle');
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
+ label: 'right_sidebar',
+ property: 'reviewer',
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/reviewers_spec.js b/spec/frontend/sidebar/reviewers_spec.js
new file mode 100644
index 00000000000..effcac266f0
--- /dev/null
+++ b/spec/frontend/sidebar/reviewers_spec.js
@@ -0,0 +1,169 @@
+import { mount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
+import { GlIcon } from '@gitlab/ui';
+import Reviewer from '~/sidebar/components/reviewers/reviewers.vue';
+import UsersMock from './mock_data';
+import UsersMockHelper from '../helpers/user_mock_data_helper';
+
+describe('Reviewer component', () => {
+ const getDefaultProps = () => ({
+ rootPath: 'http://localhost:3000',
+ users: [],
+ editable: false,
+ });
+ let wrapper;
+
+ const createWrapper = (propsData = getDefaultProps()) => {
+ wrapper = mount(Reviewer, {
+ propsData,
+ });
+ };
+
+ const findCollapsedChildren = () => wrapper.findAll('.sidebar-collapsed-icon > *');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('No reviewers/users', () => {
+ it('displays no reviewer icon when collapsed', () => {
+ createWrapper();
+ const collapsedChildren = findCollapsedChildren();
+ const userIcon = collapsedChildren.at(0).find(GlIcon);
+
+ expect(collapsedChildren.length).toBe(1);
+ expect(collapsedChildren.at(0).attributes('aria-label')).toBe('None');
+ expect(userIcon.exists()).toBe(true);
+ expect(userIcon.props('name')).toBe('user');
+ });
+ });
+
+ describe('One reviewer/user', () => {
+ it('displays one reviewer icon when collapsed', () => {
+ createWrapper({
+ ...getDefaultProps(),
+ users: [UsersMock.user],
+ });
+
+ const collapsedChildren = findCollapsedChildren();
+ const reviewer = collapsedChildren.at(0);
+
+ expect(collapsedChildren.length).toBe(1);
+ expect(reviewer.find('.avatar').attributes('src')).toBe(UsersMock.user.avatar);
+ expect(reviewer.find('.avatar').attributes('alt')).toBe(`${UsersMock.user.name}'s avatar`);
+
+ expect(trimText(reviewer.find('.author').text())).toBe(UsersMock.user.name);
+ });
+ });
+
+ describe('Two or more reviewers/users', () => {
+ it('displays two reviewer icons when collapsed', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(2);
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const collapsedChildren = findCollapsedChildren();
+
+ expect(collapsedChildren.length).toBe(2);
+
+ const first = collapsedChildren.at(0);
+
+ expect(first.find('.avatar').attributes('src')).toBe(users[0].avatar_url);
+ expect(first.find('.avatar').attributes('alt')).toBe(`${users[0].name}'s avatar`);
+
+ expect(trimText(first.find('.author').text())).toBe(users[0].name);
+
+ const second = collapsedChildren.at(1);
+
+ expect(second.find('.avatar').attributes('src')).toBe(users[1].avatar_url);
+ expect(second.find('.avatar').attributes('alt')).toBe(`${users[1].name}'s avatar`);
+
+ expect(trimText(second.find('.author').text())).toBe(users[1].name);
+ });
+
+ it('displays one reviewer icon and counter when collapsed', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const collapsedChildren = findCollapsedChildren();
+
+ expect(collapsedChildren.length).toBe(2);
+
+ const first = collapsedChildren.at(0);
+
+ expect(first.find('.avatar').attributes('src')).toBe(users[0].avatar_url);
+ expect(first.find('.avatar').attributes('alt')).toBe(`${users[0].name}'s avatar`);
+
+ expect(trimText(first.find('.author').text())).toBe(users[0].name);
+
+ const second = collapsedChildren.at(1);
+
+ expect(trimText(second.find('.avatar-counter').text())).toBe('+2');
+ });
+
+ it('Shows two reviewers', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(2);
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ editable: true,
+ });
+
+ expect(wrapper.findAll('.user-item').length).toBe(users.length);
+ expect(wrapper.find('.user-list-more').exists()).toBe(false);
+ });
+
+ it('shows sorted reviewers where "can merge" users are sorted first', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ users[0].can_merge = false;
+ users[1].can_merge = false;
+ users[2].can_merge = true;
+
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ editable: true,
+ });
+
+ expect(wrapper.vm.sortedReviewers[0].can_merge).toBe(true);
+ });
+
+ it('passes the sorted reviewers to the uncollapsed-reviewer-list', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ users[0].can_merge = false;
+ users[1].can_merge = false;
+ users[2].can_merge = true;
+
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const userItems = wrapper.findAll('.user-list .user-item a');
+
+ expect(userItems.length).toBe(3);
+ expect(userItems.at(0).attributes('title')).toBe(users[2].name);
+ });
+
+ it('passes the sorted reviewers to the collapsed-reviewer-list', () => {
+ const users = UsersMockHelper.createNumberRandomUsers(3);
+ users[0].can_merge = false;
+ users[1].can_merge = false;
+ users[2].can_merge = true;
+
+ createWrapper({
+ ...getDefaultProps(),
+ users,
+ });
+
+ const collapsedButton = wrapper.find('.sidebar-collapsed-user button');
+
+ expect(trimText(collapsedButton.text())).toBe(users[2].name);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/sidebar_labels_spec.js b/spec/frontend/sidebar/sidebar_labels_spec.js
index 29333a344e1..9d59dc750fb 100644
--- a/spec/frontend/sidebar/sidebar_labels_spec.js
+++ b/spec/frontend/sidebar/sidebar_labels_spec.js
@@ -114,7 +114,7 @@ describe('sidebar labels', () => {
const expected = {
[defaultProps.issuableType]: {
- label_ids: [27, 28, 40],
+ label_ids: [27, 28, 29, 40],
},
};
diff --git a/spec/frontend/snippet/snippet_edit_spec.js b/spec/frontend/snippet/snippet_edit_spec.js
index 7c12c0cac03..42a55ac0d3e 100644
--- a/spec/frontend/snippet/snippet_edit_spec.js
+++ b/spec/frontend/snippet/snippet_edit_spec.js
@@ -5,6 +5,7 @@ import initSnippet from '~/snippet/snippet_bundle';
jest.mock('~/snippet/snippet_bundle');
jest.mock('~/snippets');
+jest.mock('~/gl_form');
describe('Snippet edit form initialization', () => {
const setFF = flag => {
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index 1cf1ee74ddf..e742a6b9eaf 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -3,6 +3,7 @@
exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
<div
class="file-holder snippet"
+ data-qa-selector="file_holder_container"
>
<blob-header-edit-stub
candelete="true"
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index b6abb9f389a..c1fad8cebe6 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -148,17 +148,17 @@ describe('Snippet Edit app', () => {
// Ideally we wouldn't call this method directly, but we don't have a way to trigger
// apollo responses yet.
- const loadSnippet = (...edges) => {
- if (edges.length) {
+ const loadSnippet = (...nodes) => {
+ if (nodes.length) {
wrapper.setData({
- snippet: edges[0],
+ snippet: nodes[0],
});
}
wrapper.vm.onSnippetFetch({
data: {
snippets: {
- edges,
+ nodes,
},
},
});
diff --git a/spec/frontend/snippets/components/snippet_blob_view_spec.js b/spec/frontend/snippets/components/snippet_blob_view_spec.js
index 9c4b2734a3f..1ccecd7b5ba 100644
--- a/spec/frontend/snippets/components/snippet_blob_view_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_view_spec.js
@@ -140,10 +140,10 @@ describe('Blob Embeddable', () => {
async ({ snippetBlobs, currentBlob, expectedContent }) => {
const apolloData = {
snippets: {
- edges: [
+ nodes: [
{
- node: {
- blobs: snippetBlobs,
+ blobs: {
+ nodes: snippetBlobs,
},
},
],
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index d861f6c9cd7..0f2456cd9ea 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -23,7 +23,10 @@ export const username = 'gitlabuser';
export const projectId = '123456';
export const returnUrl = 'https://www.gitlab.com';
export const sourcePath = 'foobar.md.html';
-
+export const mergeRequestMeta = {
+ title: `Update ${sourcePath} file`,
+ description: 'Copy update',
+};
export const savedContentMeta = {
branch: {
label: 'foobar',
diff --git a/spec/frontend/static_site_editor/pages/home_spec.js b/spec/frontend/static_site_editor/pages/home_spec.js
index 41f8a1075c0..10d34d9651c 100644
--- a/spec/frontend/static_site_editor/pages/home_spec.js
+++ b/spec/frontend/static_site_editor/pages/home_spec.js
@@ -1,4 +1,3 @@
-import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Home from '~/static_site_editor/pages/home.vue';
@@ -7,6 +6,7 @@ import EditArea from '~/static_site_editor/components/edit_area.vue';
import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
import submitContentChangesMutation from '~/static_site_editor/graphql/mutations/submit_content_changes.mutation.graphql';
+import hasSubmittedChangesMutation from '~/static_site_editor/graphql/mutations/has_submitted_changes.mutation.graphql';
import { SUCCESS_ROUTE } from '~/static_site_editor/router/constants';
import { TRACKING_ACTION_INITIALIZE_EDITOR } from '~/static_site_editor/constants';
@@ -17,6 +17,7 @@ import {
sourceContentTitle as title,
sourcePath,
username,
+ mergeRequestMeta,
savedContentMeta,
submitChangesError,
trackingCategory,
@@ -24,8 +25,6 @@ import {
const localVue = createLocalVue();
-localVue.use(Vuex);
-
describe('static_site_editor/pages/home', () => {
let wrapper;
let store;
@@ -33,6 +32,19 @@ describe('static_site_editor/pages/home', () => {
let $router;
let mutateMock;
let trackingSpy;
+ const defaultAppData = {
+ isSupportedContent: true,
+ hasSubmittedChanges: false,
+ returnUrl,
+ project,
+ username,
+ sourcePath,
+ };
+ const hasSubmittedChangesMutationPayload = {
+ data: {
+ appData: { ...defaultAppData, hasSubmittedChanges: true },
+ },
+ };
const buildApollo = (queries = {}) => {
mutateMock = jest.fn();
@@ -64,7 +76,7 @@ describe('static_site_editor/pages/home', () => {
},
data() {
return {
- appData: { isSupportedContent: true, returnUrl, project, username, sourcePath },
+ appData: { ...defaultAppData },
sourceContent: { title, content },
...data,
};
@@ -152,8 +164,14 @@ describe('static_site_editor/pages/home', () => {
});
describe('when submitting changes fails', () => {
+ const setupMutateMock = () => {
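+ // First mutate call (hasSubmittedChanges) resolves, second one (submitContentChanges) rejects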
+ mutateMock
+ .mockResolvedValueOnce(hasSubmittedChangesMutationPayload)
+ .mockRejectedValueOnce(new Error(submitChangesError));
+ };
+
beforeEach(() => {
- mutateMock.mockRejectedValue(new Error(submitChangesError));
+ setupMutateMock();
buildWrapper();
findEditArea().vm.$emit('submit', { content });
@@ -166,6 +184,8 @@ describe('static_site_editor/pages/home', () => {
});
it('retries submitting changes when retry button is clicked', () => {
+ setupMutateMock();
+
findSubmitChangesError().vm.$emit('retry');
expect(mutateMock).toHaveBeenCalled();
@@ -190,7 +210,11 @@ describe('static_site_editor/pages/home', () => {
const newContent = `new ${content}`;
beforeEach(() => {
- mutateMock.mockResolvedValueOnce({ data: { submitContentChanges: savedContentMeta } });
+ mutateMock.mockResolvedValueOnce(hasSubmittedChangesMutationPayload).mockResolvedValueOnce({
+ data: {
+ submitContentChanges: savedContentMeta,
+ },
+ });
buildWrapper();
findEditArea().vm.$emit('submit', { content: newContent });
@@ -198,8 +222,19 @@ describe('static_site_editor/pages/home', () => {
return wrapper.vm.$nextTick();
});
+ it('dispatches hasSubmittedChanges mutation', () => {
+ expect(mutateMock).toHaveBeenNthCalledWith(1, {
+ mutation: hasSubmittedChangesMutation,
+ variables: {
+ input: {
+ hasSubmittedChanges: true,
+ },
+ },
+ });
+ });
+
it('dispatches submitContentChanges mutation', () => {
- expect(mutateMock).toHaveBeenCalledWith({
+ expect(mutateMock).toHaveBeenNthCalledWith(2, {
mutation: submitContentChangesMutation,
variables: {
input: {
@@ -207,6 +242,8 @@ describe('static_site_editor/pages/home', () => {
project,
sourcePath,
username,
+ images: undefined,
+ mergeRequestMeta,
},
},
});
diff --git a/spec/frontend/static_site_editor/pages/success_spec.js b/spec/frontend/static_site_editor/pages/success_spec.js
index 3e19e2413e7..3fc69dc4586 100644
--- a/spec/frontend/static_site_editor/pages/success_spec.js
+++ b/spec/frontend/static_site_editor/pages/success_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
-import { GlEmptyState, GlButton } from '@gitlab/ui';
+import { GlButton, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
import Success from '~/static_site_editor/pages/success.vue';
import { savedContentMeta, returnUrl, sourcePath } from '../mock_data';
import { HOME_ROUTE } from '~/static_site_editor/router/constants';
-describe('static_site_editor/pages/success', () => {
+describe('~/static_site_editor/pages/success.vue', () => {
const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
let wrapper;
let router;
@@ -15,14 +15,15 @@ describe('static_site_editor/pages/success', () => {
};
};
- const buildWrapper = (data = {}) => {
+ const buildWrapper = (data = {}, appData = {}) => {
wrapper = shallowMount(Success, {
mocks: {
$router: router,
},
stubs: {
- GlEmptyState,
GlButton,
+ GlEmptyState,
+ GlLoadingIcon,
},
propsData: {
mergeRequestsIllustrationPath,
@@ -33,6 +34,8 @@ describe('static_site_editor/pages/success', () => {
appData: {
returnUrl,
sourcePath,
+ hasSubmittedChanges: true,
+ ...appData,
},
...data,
};
@@ -40,8 +43,9 @@ describe('static_site_editor/pages/success', () => {
});
};
- const findEmptyState = () => wrapper.find(GlEmptyState);
const findReturnUrlButton = () => wrapper.find(GlButton);
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
beforeEach(() => {
buildRouter();
@@ -52,50 +56,76 @@ describe('static_site_editor/pages/success', () => {
wrapper = null;
});
- it('renders empty state with a link to the created merge request', () => {
- buildWrapper();
+ describe('when savedContentMeta is valid', () => {
+ it('renders empty state with a link to the created merge request', () => {
+ buildWrapper();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props()).toMatchObject({
+ primaryButtonText: 'View merge request',
+ primaryButtonLink: savedContentMeta.mergeRequest.url,
+ title: 'Your merge request has been created',
+ svgPath: mergeRequestsIllustrationPath,
+ svgHeight: 146,
+ });
+ });
- expect(findEmptyState().exists()).toBe(true);
- expect(findEmptyState().props()).toMatchObject({
- primaryButtonText: 'View merge request',
- primaryButtonLink: savedContentMeta.mergeRequest.url,
- title: 'Your merge request has been created',
- svgPath: mergeRequestsIllustrationPath,
+ it('displays merge request instructions in the empty state', () => {
+ buildWrapper();
+
+ expect(findEmptyState().text()).toContain(
+ 'To see your changes live you will need to do the following things:',
+ );
+ expect(findEmptyState().text()).toContain('1. Add a clear title to describe the change.');
+ expect(findEmptyState().text()).toContain(
+ '2. Add a description to explain why the change is being made.',
+ );
+ expect(findEmptyState().text()).toContain(
+ '3. Assign a person to review and accept the merge request.',
+ );
});
- });
- it('displays merge request instructions in the empty state', () => {
- buildWrapper();
-
- expect(findEmptyState().text()).toContain(
- 'To see your changes live you will need to do the following things:',
- );
- expect(findEmptyState().text()).toContain('1. Add a clear title to describe the change.');
- expect(findEmptyState().text()).toContain(
- '2. Add a description to explain why the change is being made.',
- );
- expect(findEmptyState().text()).toContain(
- '3. Assign a person to review and accept the merge request.',
- );
- });
+ it('displays return to site button', () => {
+ buildWrapper();
+
+ expect(findReturnUrlButton().text()).toBe('Return to site');
+ expect(findReturnUrlButton().attributes().href).toBe(returnUrl);
+ });
- it('displays return to site button', () => {
- buildWrapper();
+ it('displays source path', () => {
+ buildWrapper();
- expect(findReturnUrlButton().text()).toBe('Return to site');
- expect(findReturnUrlButton().attributes().href).toBe(returnUrl);
+ expect(wrapper.text()).toContain(`Update ${sourcePath} file`);
+ });
});
- it('displays source path', () => {
- buildWrapper();
+ describe('when savedContentMeta is invalid', () => {
+ it('renders empty state with a loader', () => {
+ buildWrapper({ savedContentMeta: null });
- expect(wrapper.text()).toContain(`Update ${sourcePath} file`);
- });
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props()).toMatchObject({
+ title: 'Creating your merge request',
+ svgPath: mergeRequestsIllustrationPath,
+ });
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
- it('redirects to the HOME route when content has not been submitted', () => {
- buildWrapper({ savedContentMeta: null });
+ it('displays helper info in the empty state', () => {
+ buildWrapper({ savedContentMeta: null });
- expect(router.push).toHaveBeenCalledWith(HOME_ROUTE);
- expect(wrapper.html()).toBe('');
+ expect(findEmptyState().text()).toContain(
+ 'You can set an assignee to get your changes reviewed and deployed once your merge request is created',
+ );
+ expect(findEmptyState().text()).toContain(
+ 'A link to view the merge request will appear once ready',
+ );
+ });
+
+ it('redirects to the HOME route when content has not been submitted', () => {
+ buildWrapper({ savedContentMeta: null }, { hasSubmittedChanges: false });
+
+ expect(router.push).toHaveBeenCalledWith(HOME_ROUTE);
+ });
});
});
diff --git a/spec/frontend/static_site_editor/services/front_matterify_spec.js b/spec/frontend/static_site_editor/services/front_matterify_spec.js
new file mode 100644
index 00000000000..dbaedc30849
--- /dev/null
+++ b/spec/frontend/static_site_editor/services/front_matterify_spec.js
@@ -0,0 +1,47 @@
+import {
+ sourceContentYAML as content,
+ sourceContentHeaderObjYAML as yamlFrontMatterObj,
+ sourceContentSpacing as spacing,
+ sourceContentBody as body,
+} from '../mock_data';
+
+import { frontMatterify, stringify } from '~/static_site_editor/services/front_matterify';
+
+describe('static_site_editor/services/front_matterify', () => {
+ const frontMatterifiedContent = {
+ source: content,
+ matter: yamlFrontMatterObj,
+ spacing,
+ content: body,
+ delimiter: '---',
+ type: 'yaml',
+ };
+ const frontMatterifiedBody = {
+ source: body,
+ matter: null,
+ spacing: null,
+ content: body,
+ delimiter: null,
+ type: null,
+ };
+
+ describe('frontMatterify', () => {
+ it.each`
+ frontMatterified | target
+ ${frontMatterify(content)} | ${frontMatterifiedContent}
+ ${frontMatterify(body)} | ${frontMatterifiedBody}
+ `('returns $target from $frontMatterified', ({ frontMatterified, target }) => {
+ expect(frontMatterified).toEqual(target);
+ });
+ });
+
+ describe('stringify', () => {
+ it.each`
+ stringified | target
+ ${stringify(frontMatterifiedContent)} | ${content}
+ ${stringify(frontMatterifiedBody)} | ${body}
+ `('returns $target from $stringified', ({ stringified, target }) => {
+ expect(stringified).toBe(target);
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index d464e6b1895..5018da7300b 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -19,6 +19,7 @@ import {
commitBranchResponse,
commitMultipleResponse,
createMergeRequestResponse,
+ mergeRequestMeta,
sourcePath,
sourceContentYAML as content,
trackingCategory,
@@ -28,11 +29,20 @@ import {
jest.mock('~/static_site_editor/services/generate_branch_name');
describe('submitContentChanges', () => {
- const mergeRequestTitle = `Update ${sourcePath} file`;
const branch = 'branch-name';
let trackingSpy;
let origPage;
+ const buildPayload = (overrides = {}) => ({
+ username,
+ projectId,
+ sourcePath,
+ content,
+ images,
+ mergeRequestMeta,
+ ...overrides,
+ });
+
beforeEach(() => {
jest.spyOn(Api, 'createBranch').mockResolvedValue({ data: commitBranchResponse });
jest.spyOn(Api, 'commitMultiple').mockResolvedValue({ data: commitMultipleResponse });
@@ -53,7 +63,7 @@ describe('submitContentChanges', () => {
});
it('creates a branch named after the username and target branch', () => {
- return submitContentChanges({ username, projectId }).then(() => {
+ return submitContentChanges(buildPayload()).then(() => {
expect(Api.createBranch).toHaveBeenCalledWith(projectId, {
ref: DEFAULT_TARGET_BRANCH,
branch,
@@ -64,16 +74,16 @@ describe('submitContentChanges', () => {
it('notifies error when branch could not be created', () => {
Api.createBranch.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ return expect(submitContentChanges(buildPayload())).rejects.toThrow(
SUBMIT_CHANGES_BRANCH_ERROR,
);
});
it('commits the content changes to the branch when creating branch succeeds', () => {
- return submitContentChanges({ username, projectId, sourcePath, content, images }).then(() => {
+ return submitContentChanges(buildPayload()).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
branch,
- commit_message: mergeRequestTitle,
+ commit_message: mergeRequestMeta.title,
actions: [
{
action: 'update',
@@ -93,16 +103,11 @@ describe('submitContentChanges', () => {
it('does not commit an image if it has been removed from the content', () => {
const contentWithoutImages = '## Content without images';
- return submitContentChanges({
- username,
- projectId,
- sourcePath,
- content: contentWithoutImages,
- images,
- }).then(() => {
+ const payload = buildPayload({ content: contentWithoutImages });
+ return submitContentChanges(payload).then(() => {
expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
branch,
- commit_message: mergeRequestTitle,
+ commit_message: mergeRequestMeta.title,
actions: [
{
action: 'update',
@@ -117,17 +122,19 @@ describe('submitContentChanges', () => {
it('notifies error when content could not be committed', () => {
Api.commitMultiple.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId, images })).rejects.toThrow(
+ return expect(submitContentChanges(buildPayload())).rejects.toThrow(
SUBMIT_CHANGES_COMMIT_ERROR,
);
});
- it('creates a merge request when commiting changes succeeds', () => {
- return submitContentChanges({ username, projectId, sourcePath, content, images }).then(() => {
+ it('creates a merge request when committing changes succeeds', () => {
+ return submitContentChanges(buildPayload()).then(() => {
+ const { title, description } = mergeRequestMeta;
expect(Api.createProjectMergeRequest).toHaveBeenCalledWith(
projectId,
convertObjectPropsToSnakeCase({
- title: mergeRequestTitle,
+ title,
+ description,
targetBranch: DEFAULT_TARGET_BRANCH,
sourceBranch: branch,
}),
@@ -138,7 +145,7 @@ describe('submitContentChanges', () => {
it('notifies error when merge request could not be created', () => {
Api.createProjectMergeRequest.mockRejectedValueOnce();
- return expect(submitContentChanges({ username, projectId, images })).rejects.toThrow(
+ return expect(submitContentChanges(buildPayload())).rejects.toThrow(
SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
);
});
@@ -147,11 +154,9 @@ describe('submitContentChanges', () => {
let result;
beforeEach(() => {
- return submitContentChanges({ username, projectId, sourcePath, content, images }).then(
- _result => {
- result = _result;
- },
- );
+ return submitContentChanges(buildPayload()).then(_result => {
+ result = _result;
+ });
});
it('returns the branch name', () => {
@@ -179,7 +184,7 @@ describe('submitContentChanges', () => {
describe('sends the correct tracking event', () => {
beforeEach(() => {
- return submitContentChanges({ username, projectId, sourcePath, content, images });
+ return submitContentChanges(buildPayload());
});
it('for committing changes', () => {
diff --git a/spec/frontend/static_site_editor/services/templater_spec.js b/spec/frontend/static_site_editor/services/templater_spec.js
index 1e7ae872b7e..cb3a0a0c106 100644
--- a/spec/frontend/static_site_editor/services/templater_spec.js
+++ b/spec/frontend/static_site_editor/services/templater_spec.js
@@ -39,6 +39,10 @@ Below this line is a codeblock of the same HTML that should be ignored and prese
<p>Some paragraph...</p>
</div>
\`\`\`
+
+Below this line is an iframe that should be ignored and preserved
+
+<iframe></iframe>
`;
const sourceTemplated = `Below this line is a simple ERB (single-line erb block) example.
@@ -87,6 +91,10 @@ Below this line is a codeblock of the same HTML that should be ignored and prese
<p>Some paragraph...</p>
</div>
\`\`\`
+
+Below this line is an iframe that should be ignored and preserved
+
+<iframe></iframe>
`;
it.each`
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 544c19da57b..eebec7de9d4 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -1,4 +1,6 @@
import Vue from 'vue';
+import 'jquery';
+
import * as jqueryMatchers from 'custom-jquery-matchers';
import { config as testUtilsConfig } from '@vue/test-utils';
import Translate from '~/vue_shared/translate';
@@ -9,7 +11,6 @@ import customMatchers from './matchers';
import './helpers/dom_shims';
import './helpers/jquery';
-import '~/commons/jquery';
import '~/commons/bootstrap';
process.on('unhandledRejection', global.promiseRejectionHandler);
diff --git a/spec/frontend/user_lists/components/add_user_modal_spec.js b/spec/frontend/user_lists/components/add_user_modal_spec.js
new file mode 100644
index 00000000000..82ce195d7cd
--- /dev/null
+++ b/spec/frontend/user_lists/components/add_user_modal_spec.js
@@ -0,0 +1,50 @@
+import { mount } from '@vue/test-utils';
+import AddUserModal from '~/user_lists/components/add_user_modal.vue';
+
+describe('Add User Modal', () => {
+ let wrapper;
+
+ const click = testId => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
+
+ beforeEach(() => {
+ wrapper = mount(AddUserModal, {
+ propsData: { visible: true },
+ });
+ });
+
+ it('should explain the format of user IDs to enter', () => {
+ expect(wrapper.find('[data-testid="add-userids-description"]').text()).toContain(
+ 'Enter a comma separated list of user IDs',
+ );
+ });
+
+ describe('events', () => {
+ beforeEach(() => {
+ wrapper.find('#add-user-ids').setValue('1, 2, 3, 4');
+ });
+
+ it('should emit the users entered when Add Users is clicked', () => {
+ click('confirm-add-user-ids');
+ expect(wrapper.emitted('addUsers')).toContainEqual(['1, 2, 3, 4']);
+ });
+
+ it('should clear the input after emitting', async () => {
+ click('confirm-add-user-ids');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('#add-user-ids').element.value).toBe('');
+ });
+
+ it('should not emit the users entered if cancel is clicked', () => {
+ click('cancel-add-user-ids');
+ expect(wrapper.emitted('addUsers')).toBeUndefined();
+ });
+
+ it('should clear the input after cancelling', async () => {
+ click('cancel-add-user-ids');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find('#add-user-ids').element.value).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/components/edit_user_list_spec.js b/spec/frontend/user_lists/components/edit_user_list_spec.js
new file mode 100644
index 00000000000..51a38e12916
--- /dev/null
+++ b/spec/frontend/user_lists/components/edit_user_list_spec.js
@@ -0,0 +1,150 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import Api from '~/api';
+import createStore from '~/user_lists/store/edit';
+import EditUserList from '~/user_lists/components/edit_user_list.vue';
+import UserListForm from '~/user_lists/components/user_list_form.vue';
+import { userList } from '../../feature_flags/mock_data';
+import { redirectTo } from '~/lib/utils/url_utility';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+const localVue = createLocalVue(Vue);
+localVue.use(Vuex);
+
+describe('user_lists/components/edit_user_list', () => {
+ let wrapper;
+
+ const setInputValue = value => wrapper.find('[data-testid="user-list-name"]').setValue(value);
+
+ const click = button => wrapper.find(`[data-testid="${button}"]`).trigger('click');
+ const clickSave = () => click('save-user-list');
+
+ const destroy = () => wrapper?.destroy();
+
+ const factory = () => {
+ destroy();
+
+ wrapper = mount(EditUserList, {
+ localVue,
+ store: createStore({ projectId: '1', userListIid: '2' }),
+ provide: {
+ userListsDocsPath: '/docs/user_lists',
+ },
+ });
+ };
+
+ afterEach(() => {
+ destroy();
+ });
+
+ describe('loading', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockReturnValue(new Promise(() => {}));
+ factory();
+ });
+
+ it('should show a loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('loading error', () => {
+ const message = 'error creating list';
+ let alert;
+
+ beforeEach(async () => {
+ Api.fetchFeatureFlagUserList.mockRejectedValue({ message });
+ factory();
+ await waitForPromises();
+
+ alert = wrapper.find(GlAlert);
+ });
+
+ it('should show a flash with the error response', () => {
+ expect(alert.text()).toContain(message);
+ });
+
+ it('should not be dismissible', async () => {
+ expect(alert.props('dismissible')).toBe(false);
+ });
+
+ it('should not show a user list form', () => {
+ expect(wrapper.find(UserListForm).exists()).toBe(false);
+ });
+ });
+
+ describe('update', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockResolvedValue({ data: userList });
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should link to the documentation', () => {
+ const link = wrapper.find('[data-testid="user-list-docs-link"]');
+ expect(link.attributes('href')).toBe('/docs/user_lists');
+ });
+
+ it('should link the cancel button to the user list details path', () => {
+ const link = wrapper.find('[data-testid="user-list-cancel"]');
+ expect(link.attributes('href')).toBe(userList.path);
+ });
+
+ it('should show the user list name in the title', () => {
+ expect(wrapper.find('[data-testid="user-list-title"]').text()).toBe(`Edit ${userList.name}`);
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ Api.updateFeatureFlagUserList.mockResolvedValue({ data: userList });
+ setInputValue('test');
+ clickSave();
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should create a user list with the entered name', () => {
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: 'test',
+ iid: userList.iid,
+ });
+ });
+
+ it('should redirect to the feature flag details page', () => {
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+
+ describe('error', () => {
+ let alert;
+ let message;
+
+ beforeEach(async () => {
+ message = 'error creating list';
+ Api.updateFeatureFlagUserList.mockRejectedValue({ message });
+ setInputValue('test');
+ clickSave();
+ await waitForPromises();
+
+ alert = wrapper.find(GlAlert);
+ });
+
+ it('should show a flash with the error response', () => {
+ expect(alert.text()).toContain(message);
+ });
+
+ it('should dismiss the error if dismiss is clicked', async () => {
+ alert.find('button').trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/components/new_user_list_spec.js b/spec/frontend/user_lists/components/new_user_list_spec.js
new file mode 100644
index 00000000000..62fb0ca0859
--- /dev/null
+++ b/spec/frontend/user_lists/components/new_user_list_spec.js
@@ -0,0 +1,93 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { GlAlert } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
+import Api from '~/api';
+import createStore from '~/user_lists/store/new';
+import NewUserList from '~/user_lists/components/new_user_list.vue';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../feature_flags/mock_data';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+const localVue = createLocalVue(Vue);
+localVue.use(Vuex);
+
+describe('user_lists/components/new_user_list', () => {
+ let wrapper;
+
+ const setInputValue = value => wrapper.find('[data-testid="user-list-name"]').setValue(value);
+
+ const click = button => wrapper.find(`[data-testid="${button}"]`).trigger('click');
+
+ beforeEach(() => {
+ wrapper = mount(NewUserList, {
+ localVue,
+ store: createStore({ projectId: '1' }),
+ provide: {
+ featureFlagsPath: '/feature_flags',
+ userListsDocsPath: '/docs/user_lists',
+ },
+ });
+ });
+
+ it('should link to the documentation', () => {
+ const link = wrapper.find('[data-testid="user-list-docs-link"]');
+ expect(link.attributes('href')).toBe('/docs/user_lists');
+ });
+
+ it('should link the cancel button back to feature flags', () => {
+ const cancel = wrapper.find('[data-testid="user-list-cancel"]');
+ expect(cancel.attributes('href')).toBe('/feature_flags');
+ });
+
+ describe('create', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ Api.createFeatureFlagUserList.mockResolvedValue({ data: userList });
+ setInputValue('test');
+ click('save-user-list');
+ return wrapper.vm.$nextTick();
+ });
+
+ it('should create a user list with the entered name', () => {
+ expect(Api.createFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: 'test',
+ user_xids: '',
+ });
+ });
+
+ it('should redirect to the feature flag details page', () => {
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+
+ describe('error', () => {
+ let alert;
+
+ beforeEach(async () => {
+ Api.createFeatureFlagUserList.mockRejectedValue({ message: 'error creating list' });
+ setInputValue('test');
+ click('save-user-list');
+
+ await waitForPromises();
+
+ alert = wrapper.find(GlAlert);
+ });
+
+ it('should show a flash with the error response', () => {
+ expect(alert.text()).toContain('error creating list');
+ });
+
+ it('should dismiss the error when the dismiss button is clicked', async () => {
+ alert.find('button').trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/components/user_list_form_spec.js b/spec/frontend/user_lists/components/user_list_form_spec.js
new file mode 100644
index 00000000000..42f7659600e
--- /dev/null
+++ b/spec/frontend/user_lists/components/user_list_form_spec.js
@@ -0,0 +1,40 @@
+import { mount } from '@vue/test-utils';
+import Form from '~/user_lists/components/user_list_form.vue';
+import { userList } from '../../feature_flags/mock_data';
+
+describe('user_lists/components/user_list_form', () => {
+ let wrapper;
+ let input;
+
+ beforeEach(() => {
+ wrapper = mount(Form, {
+ propsData: {
+ cancelPath: '/cancel',
+ saveButtonLabel: 'Save',
+ userListsDocsPath: '/docs',
+ userList,
+ },
+ });
+
+ input = wrapper.find('[data-testid="user-list-name"]');
+ });
+
+ it('should set the name to the name of the given user list', () => {
+ expect(input.element.value).toBe(userList.name);
+ });
+
+ it('should link to the user lists docs', () => {
+ expect(wrapper.find('[data-testid="user-list-docs-link"]').attributes('href')).toBe('/docs');
+ });
+
+ it('should emit an updated user list when save is clicked', () => {
+ input.setValue('test');
+ wrapper.find('[data-testid="save-user-list"]').trigger('click');
+
+ expect(wrapper.emitted('submit')).toEqual([[{ ...userList, name: 'test' }]]);
+ });
+
+ it('should set the cancel button to the passed url', () => {
+ expect(wrapper.find('[data-testid="user-list-cancel"]').attributes('href')).toBe('/cancel');
+ });
+});
diff --git a/spec/frontend/user_lists/components/user_list_spec.js b/spec/frontend/user_lists/components/user_list_spec.js
new file mode 100644
index 00000000000..5f9b7967846
--- /dev/null
+++ b/spec/frontend/user_lists/components/user_list_spec.js
@@ -0,0 +1,196 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { mount } from '@vue/test-utils';
+import { uniq } from 'lodash';
+import { GlAlert, GlEmptyState, GlLoadingIcon } from '@gitlab/ui';
+import Api from '~/api';
+import { parseUserIds, stringifyUserIds } from '~/user_lists/store/utils';
+import createStore from '~/user_lists/store/show';
+import UserList from '~/user_lists/components/user_list.vue';
+import { userList } from '../../feature_flags/mock_data';
+
+jest.mock('~/api');
+
+Vue.use(Vuex);
+
+describe('User List', () => {
+ let wrapper;
+
+ const click = testId => wrapper.find(`[data-testid="${testId}"]`).trigger('click');
+
+ const findUserIds = () => wrapper.findAll('[data-testid="user-id"]');
+
+ const destroy = () => wrapper?.destroy();
+
+ const factory = () => {
+ destroy();
+
+ wrapper = mount(UserList, {
+ store: createStore({ projectId: '1', userListIid: '2' }),
+ propsData: {
+ emptyStatePath: '/empty_state.svg',
+ },
+ });
+ };
+
+ describe('loading', () => {
+ let resolveFn;
+
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockReturnValue(
+ new Promise(resolve => {
+ resolveFn = resolve;
+ }),
+ );
+ factory();
+ });
+
+ afterEach(() => {
+ resolveFn();
+ });
+
+ it('shows a loading icon', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('success', () => {
+ let userIds;
+
+ beforeEach(() => {
+ userIds = parseUserIds(userList.user_xids);
+ Api.fetchFeatureFlagUserList.mockResolvedValueOnce({ data: userList });
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('requests the user list on mount', () => {
+ expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2');
+ });
+
+ it('shows the list name', () => {
+ expect(wrapper.find('h3').text()).toBe(userList.name);
+ });
+
+ it('shows an add users button', () => {
+ expect(wrapper.find('[data-testid="add-users"]').text()).toBe('Add Users');
+ });
+
+ it('shows an edit list button', () => {
+ expect(wrapper.find('[data-testid="edit-user-list"]').text()).toBe('Edit');
+ });
+
+ it('shows a row for every id', () => {
+ expect(wrapper.findAll('[data-testid="user-id-row"]')).toHaveLength(userIds.length);
+ });
+
+ it('shows one id on each row', () => {
+ findUserIds().wrappers.forEach((w, i) => expect(w.text()).toBe(userIds[i]));
+ });
+
+ it('shows a delete button for every row', () => {
+ expect(wrapper.findAll('[data-testid="delete-user-id"]')).toHaveLength(userIds.length);
+ });
+
+ describe('adding users', () => {
+ const newIds = ['user3', 'user4', 'user5', 'test', 'example', 'foo'];
+ let receivedUserIds;
+ let parsedReceivedUserIds;
+
+ beforeEach(async () => {
+ Api.updateFeatureFlagUserList.mockResolvedValue(userList);
+ click('add-users');
+ await wrapper.vm.$nextTick();
+ wrapper.find('#add-user-ids').setValue(`${stringifyUserIds(newIds)},`);
+ click('confirm-add-user-ids');
+ await wrapper.vm.$nextTick();
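+ // updateFeatureFlagUserList is called with (projectId, { user_xids }); grab that payload string from the first call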
+ [[, { user_xids: receivedUserIds }]] = Api.updateFeatureFlagUserList.mock.calls;
+ parsedReceivedUserIds = parseUserIds(receivedUserIds);
+ });
+
+ it('should add user IDs to the user list', () => {
+ newIds.forEach(id => expect(receivedUserIds).toContain(id));
+ });
+
+ it('should not remove existing user ids', () => {
+ userIds.forEach(id => expect(receivedUserIds).toContain(id));
+ });
+
+ it('should not submit empty IDs', () => {
+ parsedReceivedUserIds.forEach(id => expect(id).not.toBe(''));
+ });
+
+ it('should not create duplicate entries', () => {
+ expect(uniq(parsedReceivedUserIds)).toEqual(parsedReceivedUserIds);
+ });
+
+ it('should display the new IDs', () => {
+ const userIdWrappers = findUserIds();
+ newIds.forEach(id => {
+ const userIdWrapper = userIdWrappers.wrappers.find(w => w.text() === id);
+ expect(userIdWrapper.exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('deleting users', () => {
+ let receivedUserIds;
+
+ beforeEach(async () => {
+ Api.updateFeatureFlagUserList.mockResolvedValue(userList);
+ click('delete-user-id');
+ await wrapper.vm.$nextTick();
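+ // Extract the user_xids string sent in the first updateFeatureFlagUserList call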
+ [[, { user_xids: receivedUserIds }]] = Api.updateFeatureFlagUserList.mock.calls;
+ });
+
+ it('should remove the ID clicked', () => {
+ expect(receivedUserIds).not.toContain(userIds[0]);
+ });
+
+ it('should not display the deleted user', () => {
+ const userIdWrappers = findUserIds();
+ const userIdWrapper = userIdWrappers.wrappers.find(w => w.text() === userIds[0]);
+ expect(userIdWrapper).toBeUndefined();
+ });
+ });
+ });
+
+ describe('error', () => {
+ const findAlert = () => wrapper.find(GlAlert);
+
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockRejectedValue();
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays the alert message', () => {
+ const alert = findAlert();
+ expect(alert.text()).toBe('Something went wrong on our end. Please try again!');
+ });
+
+ it('can dismiss the alert', async () => {
+ const alert = findAlert();
+ alert.find('button').trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(alert.exists()).toBe(false);
+ });
+ });
+
+ describe('empty list', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockResolvedValueOnce({ data: { ...userList, user_xids: '' } });
+ factory();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays an empty state', () => {
+ expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/edit/actions_spec.js b/spec/frontend/user_lists/store/edit/actions_spec.js
new file mode 100644
index 00000000000..7f0fb8e5401
--- /dev/null
+++ b/spec/frontend/user_lists/store/edit/actions_spec.js
@@ -0,0 +1,121 @@
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import createState from '~/user_lists/store/edit/state';
+import * as types from '~/user_lists/store/edit/mutation_types';
+import * as actions from '~/user_lists/store/edit/actions';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../../feature_flags/mock_data';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+describe('User Lists Edit Actions', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe('fetchUserList', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ Api.fetchFeatureFlagUserList.mockResolvedValue({ data: userList });
+ });
+
+ it('should commit RECEIVE_USER_LIST_SUCCESS', () => {
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ state,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_SUCCESS, payload: userList },
+ ],
+ [],
+ () => expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2'),
+ );
+ });
+ });
+
+ describe('error', () => {
+ let error;
+ beforeEach(() => {
+ error = { response: { data: { message: ['error'] } } };
+ Api.fetchFeatureFlagUserList.mockRejectedValue(error);
+ });
+
+ it('should commit RECEIVE_USER_LIST_ERROR', () => {
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ state,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_ERROR, payload: ['error'] },
+ ],
+ [],
+ () => expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2'),
+ );
+ });
+ });
+ });
+
+ describe('dismissErrorAlert', () => {
+ it('should commit DISMISS_ERROR_ALERT', () => {
+ return testAction(actions.dismissErrorAlert, undefined, state, [
+ { type: types.DISMISS_ERROR_ALERT },
+ ]);
+ });
+ });
+
+ describe('updateUserList', () => {
+ let updatedList;
+
+ beforeEach(() => {
+ updatedList = {
+ ...userList,
+ name: 'new',
+ };
+ });
+ describe('success', () => {
+ beforeEach(() => {
+ Api.updateFeatureFlagUserList.mockResolvedValue({ data: userList });
+ state.userList = userList;
+ });
+
+ it('should update the user list and redirect to the user list page', () => {
+ return testAction(actions.updateUserList, updatedList, state, [], [], () => {
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: updatedList.name,
+ iid: updatedList.iid,
+ });
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+ });
+
+ describe('error', () => {
+ let error;
+
+ beforeEach(() => {
+ error = { message: 'error' };
+ Api.updateFeatureFlagUserList.mockRejectedValue(error);
+ });
+
+ it('should commit RECEIVE_USER_LIST_ERROR', () => {
+ return testAction(
+ actions.updateUserList,
+ updatedList,
+ state,
+ [{ type: types.RECEIVE_USER_LIST_ERROR, payload: ['error'] }],
+ [],
+ () =>
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ name: updatedList.name,
+ iid: updatedList.iid,
+ }),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/edit/mutations_spec.js b/spec/frontend/user_lists/store/edit/mutations_spec.js
new file mode 100644
index 00000000000..3d4d2a59717
--- /dev/null
+++ b/spec/frontend/user_lists/store/edit/mutations_spec.js
@@ -0,0 +1,61 @@
+import statuses from '~/user_lists/constants/edit';
+import createState from '~/user_lists/store/edit/state';
+import * as types from '~/user_lists/store/edit/mutation_types';
+import mutations from '~/user_lists/store/edit/mutations';
+import { userList } from '../../../feature_flags/mock_data';
+
+describe('User List Edit Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe(types.REQUEST_USER_LIST, () => {
+ beforeEach(() => {
+ mutations[types.REQUEST_USER_LIST](state);
+ });
+
+ it('sets the state to loading', () => {
+ expect(state.status).toBe(statuses.LOADING);
+ });
+ });
+
+ describe(types.RECEIVE_USER_LIST_SUCCESS, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](state, userList);
+ });
+
+ it('sets the state to success', () => {
+ expect(state.status).toBe(statuses.SUCCESS);
+ });
+
+ it('sets the user list to the one received', () => {
+ expect(state.userList).toEqual(userList);
+ });
+ });
+
+ describe(types.RECEIVE_USER_LIST_ERROR, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_ERROR](state, ['network error']);
+ });
+
+ it('sets the state to error', () => {
+ expect(state.status).toBe(statuses.ERROR);
+ });
+
+ it('sets the error message to the received one', () => {
+ expect(state.errorMessage).toEqual(['network error']);
+ });
+ });
+
+ describe(types.DISMISS_ERROR_ALERT, () => {
+ beforeEach(() => {
+ mutations[types.DISMISS_ERROR_ALERT](state);
+ });
+
+ it('sets the state to error dismissed', () => {
+ expect(state.status).toBe(statuses.UNSYNCED);
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/new/actions_spec.js b/spec/frontend/user_lists/store/new/actions_spec.js
new file mode 100644
index 00000000000..9cc6212a125
--- /dev/null
+++ b/spec/frontend/user_lists/store/new/actions_spec.js
@@ -0,0 +1,69 @@
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import createState from '~/user_lists/store/new/state';
+import * as types from '~/user_lists/store/new/mutation_types';
+import * as actions from '~/user_lists/store/new/actions';
+import { redirectTo } from '~/lib/utils/url_utility';
+import { userList } from '../../../feature_flags/mock_data';
+
+jest.mock('~/api');
+jest.mock('~/lib/utils/url_utility');
+
+describe('User Lists New Actions', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1' });
+ });
+
+ describe('dismissErrorAlert', () => {
+ it('should commit DISMISS_ERROR_ALERT', () => {
+ return testAction(actions.dismissErrorAlert, undefined, state, [
+ { type: types.DISMISS_ERROR_ALERT },
+ ]);
+ });
+ });
+
+ describe('createUserList', () => {
+ let createdList;
+
+ beforeEach(() => {
+ createdList = {
+ ...userList,
+ name: 'new',
+ };
+ });
+ describe('success', () => {
+ beforeEach(() => {
+ Api.createFeatureFlagUserList.mockResolvedValue({ data: userList });
+ });
+
+ it('should redirect to the user list page', () => {
+ return testAction(actions.createUserList, createdList, state, [], [], () => {
+ expect(Api.createFeatureFlagUserList).toHaveBeenCalledWith('1', createdList);
+ expect(redirectTo).toHaveBeenCalledWith(userList.path);
+ });
+ });
+ });
+
+ describe('error', () => {
+ let error;
+
+ beforeEach(() => {
+ error = { message: 'error' };
+ Api.createFeatureFlagUserList.mockRejectedValue(error);
+ });
+
+ it('should commit RECEIVE_CREATE_USER_LIST_ERROR', () => {
+ return testAction(
+ actions.createUserList,
+ createdList,
+ state,
+ [{ type: types.RECEIVE_CREATE_USER_LIST_ERROR, payload: ['error'] }],
+ [],
+ () => expect(Api.createFeatureFlagUserList).toHaveBeenCalledWith('1', createdList),
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/new/mutations_spec.js b/spec/frontend/user_lists/store/new/mutations_spec.js
new file mode 100644
index 00000000000..89e8a83eb25
--- /dev/null
+++ b/spec/frontend/user_lists/store/new/mutations_spec.js
@@ -0,0 +1,38 @@
+import createState from '~/user_lists/store/new/state';
+import * as types from '~/user_lists/store/new/mutation_types';
+import mutations from '~/user_lists/store/new/mutations';
+
+describe('User List New Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState({ projectId: '1' });
+ });
+
+ describe(types.RECEIVE_CREATE_USER_LIST_ERROR, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_CREATE_USER_LIST_ERROR](state, ['network error']);
+ });
+
+ it('sets the error message to the received one', () => {
+ expect(state.errorMessage).toEqual(['network error']);
+ });
+
+ it('sets the error message to the received API message if present', () => {
+ const message = ['name is blank', 'name is too short'];
+
+ mutations[types.RECEIVE_CREATE_USER_LIST_ERROR](state, message);
+ expect(state.errorMessage).toEqual(message);
+ });
+ });
+
+ describe(types.DISMISS_ERROR_ALERT, () => {
+ beforeEach(() => {
+ mutations[types.DISMISS_ERROR_ALERT](state);
+ });
+
+ it('clears the error message', () => {
+ expect(state.errorMessage).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/show/actions_spec.js b/spec/frontend/user_lists/store/show/actions_spec.js
new file mode 100644
index 00000000000..25a6b9ec0e4
--- /dev/null
+++ b/spec/frontend/user_lists/store/show/actions_spec.js
@@ -0,0 +1,117 @@
+import testAction from 'helpers/vuex_action_helper';
+import { userList } from 'jest/feature_flags/mock_data';
+import Api from '~/api';
+import { stringifyUserIds } from '~/user_lists/store/utils';
+import createState from '~/user_lists/store/show/state';
+import * as types from '~/user_lists/store/show/mutation_types';
+import * as actions from '~/user_lists/store/show/actions';
+
+jest.mock('~/api');
+
+describe('User Lists Show Actions', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe('fetchUserList', () => {
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_SUCCESS on success', () => {
+ Api.fetchFeatureFlagUserList.mockResolvedValue({ data: userList });
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ mockState,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_SUCCESS, payload: userList },
+ ],
+ [],
+ () => expect(Api.fetchFeatureFlagUserList).toHaveBeenCalledWith('1', '2'),
+ );
+ });
+
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_ERROR on error', () => {
+ Api.fetchFeatureFlagUserList.mockRejectedValue({ message: 'fail' });
+ return testAction(
+ actions.fetchUserList,
+ undefined,
+ mockState,
+ [{ type: types.REQUEST_USER_LIST }, { type: types.RECEIVE_USER_LIST_ERROR }],
+ [],
+ );
+ });
+ });
+
+ describe('dismissErrorAlert', () => {
+ it('commits DISMISS_ERROR_ALERT', () => {
+ return testAction(
+ actions.dismissErrorAlert,
+ undefined,
+ mockState,
+ [{ type: types.DISMISS_ERROR_ALERT }],
+ [],
+ );
+ });
+ });
+
+ describe('addUserIds', () => {
+ it('adds the given IDs and tries to update the user list', () => {
+ return testAction(
+ actions.addUserIds,
+ '1,2,3',
+ mockState,
+ [{ type: types.ADD_USER_IDS, payload: '1,2,3' }],
+ [{ type: 'updateUserList' }],
+ );
+ });
+ });
+
+ describe('removeUserId', () => {
+ it('removes the given ID and tries to update the user list', () => {
+ return testAction(
+ actions.removeUserId,
+ 'user3',
+ mockState,
+ [{ type: types.REMOVE_USER_ID, payload: 'user3' }],
+ [{ type: 'updateUserList' }],
+ );
+ });
+ });
+
+ describe('updateUserList', () => {
+ beforeEach(() => {
+ mockState.userList = userList;
+ mockState.userIds = ['user1', 'user2', 'user3'];
+ });
+
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_SUCCESS on success', () => {
+ Api.updateFeatureFlagUserList.mockResolvedValue({ data: userList });
+ return testAction(
+ actions.updateUserList,
+ undefined,
+ mockState,
+ [
+ { type: types.REQUEST_USER_LIST },
+ { type: types.RECEIVE_USER_LIST_SUCCESS, payload: userList },
+ ],
+ [],
+ () =>
+ expect(Api.updateFeatureFlagUserList).toHaveBeenCalledWith('1', {
+ ...userList,
+ user_xids: stringifyUserIds(mockState.userIds),
+ }),
+ );
+ });
+ it('commits REQUEST_USER_LIST and RECEIVE_USER_LIST_ERROR on error', () => {
+ Api.updateFeatureFlagUserList.mockRejectedValue({ message: 'fail' });
+ return testAction(
+ actions.updateUserList,
+ undefined,
+ mockState,
+ [{ type: types.REQUEST_USER_LIST }, { type: types.RECEIVE_USER_LIST_ERROR }],
+ [],
+ );
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/show/mutations_spec.js b/spec/frontend/user_lists/store/show/mutations_spec.js
new file mode 100644
index 00000000000..364cc6a0225
--- /dev/null
+++ b/spec/frontend/user_lists/store/show/mutations_spec.js
@@ -0,0 +1,86 @@
+import { uniq } from 'lodash';
+import { userList } from 'jest/feature_flags/mock_data';
+import createState from '~/user_lists/store/show/state';
+import mutations from '~/user_lists/store/show/mutations';
+import { states } from '~/user_lists/constants/show';
+import * as types from '~/user_lists/store/show/mutation_types';
+
+describe('User Lists Show Mutations', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = createState({ projectId: '1', userListIid: '2' });
+ });
+
+ describe(types.REQUEST_USER_LIST, () => {
+ it('puts us in the loading state', () => {
+ mutations[types.REQUEST_USER_LIST](mockState);
+
+ expect(mockState.state).toBe(states.LOADING);
+ });
+ });
+
+ describe(types.RECEIVE_USER_LIST_SUCCESS, () => {
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, userList);
+ });
+
+ it('sets the state to SUCCESS', () => {
+ expect(mockState.state).toBe(states.SUCCESS);
+ });
+
+ it('sets the active user list', () => {
+ expect(mockState.userList).toEqual(userList);
+ });
+
+ it('splits the user IDs into an Array', () => {
+ expect(mockState.userIds).toEqual(userList.user_xids.split(','));
+ });
+
+ it('sets user IDs to an empty Array if an empty string is received', () => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, { ...userList, user_xids: '' });
+ expect(mockState.userIds).toEqual([]);
+ });
+ });
+ describe(types.RECEIVE_USER_LIST_ERROR, () => {
+ it('sets the state to error', () => {
+ mutations[types.RECEIVE_USER_LIST_ERROR](mockState);
+ expect(mockState.state).toBe(states.ERROR);
+ });
+ });
+ describe(types.ADD_USER_IDS, () => {
+ const newIds = ['user3', 'test1', '1', '3', ''];
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, userList);
+ mutations[types.ADD_USER_IDS](mockState, newIds.join(', '));
+ });
+
+ it('adds the new IDs to the state unless empty', () => {
+ newIds.filter(id => id).forEach(id => expect(mockState.userIds).toContain(id));
+ });
+
+ it('does not add duplicate IDs to the state', () => {
+ expect(mockState.userIds).toEqual(uniq(mockState.userIds));
+ });
+ });
+ describe(types.REMOVE_USER_ID, () => {
+ let userIds;
+ let removedId;
+
+ beforeEach(() => {
+ mutations[types.RECEIVE_USER_LIST_SUCCESS](mockState, userList);
+ userIds = mockState.userIds;
+ removedId = 'user3';
+ mutations[types.REMOVE_USER_ID](mockState, removedId);
+ });
+
+ it('should remove the given id', () => {
+ expect(mockState.userIds).not.toContain(removedId);
+ });
+
+ it('should leave the rest of the IDs alone', () => {
+ userIds.filter(id => id !== removedId).forEach(id => expect(mockState.userIds).toContain(id));
+ });
+ });
+});
diff --git a/spec/frontend/user_lists/store/utils_spec.js b/spec/frontend/user_lists/store/utils_spec.js
new file mode 100644
index 00000000000..9547b463eec
--- /dev/null
+++ b/spec/frontend/user_lists/store/utils_spec.js
@@ -0,0 +1,23 @@
+import { parseUserIds, stringifyUserIds } from '~/user_lists/store/utils';
+
+describe('User List Store Utils', () => {
+ describe('parseUserIds', () => {
+ it('should split comma-separated user IDs into an array', () => {
+ expect(parseUserIds('1,2,3')).toEqual(['1', '2', '3']);
+ });
+
+ it('should filter whitespace before the comma', () => {
+ expect(parseUserIds('1\t,2 ,3')).toEqual(['1', '2', '3']);
+ });
+
+ it('should filter whitespace after the comma', () => {
+ expect(parseUserIds('1,\t2, 3')).toEqual(['1', '2', '3']);
+ });
+ });
+
+ describe('stringifyUserIds', () => {
+ it('should convert a list of user IDs into a comma-separated string', () => {
+ expect(stringifyUserIds(['1', '2', '3'])).toBe('1,2,3');
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index caea9a757ae..015f8bbac51 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -130,7 +130,7 @@ describe('MRWidgetHeader', () => {
});
it('renders clipboard button', () => {
- expect(vm.$el.querySelector('.btn-clipboard')).not.toEqual(null);
+ expect(vm.$el.querySelector('[data-testid="mr-widget-copy-clipboard"]')).not.toEqual(null);
});
it('renders target branch', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
index 6ec30493f8b..9923434a7dd 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
@@ -6,6 +6,10 @@ import component from '~/vue_merge_request_widget/components/states/mr_widget_re
describe('Merge request widget rebase component', () => {
let Component;
let vm;
+
+ const findRebaseMessageEl = () => vm.$el.querySelector('[data-testid="rebase-message"]');
+ const findRebaseMessageElText = () => findRebaseMessageEl().textContent.trim();
+
beforeEach(() => {
Component = Vue.extend(component);
});
@@ -21,9 +25,7 @@ describe('Merge request widget rebase component', () => {
service: {},
});
- expect(
- vm.$el.querySelector('.rebase-state-find-class-convention span').textContent.trim(),
- ).toContain('Rebase in progress');
+ expect(findRebaseMessageElText()).toContain('Rebase in progress');
});
});
@@ -39,9 +41,7 @@ describe('Merge request widget rebase component', () => {
});
it('it should render rebase button and warning message', () => {
- const text = vm.$el
- .querySelector('.rebase-state-find-class-convention span')
- .textContent.trim();
+ const text = findRebaseMessageElText();
expect(text).toContain('Fast-forward merge is not possible.');
expect(text.replace(/\s\s+/g, ' ')).toContain(
@@ -53,9 +53,7 @@ describe('Merge request widget rebase component', () => {
vm.rebasingError = 'Something went wrong!';
Vue.nextTick(() => {
- expect(
- vm.$el.querySelector('.rebase-state-find-class-convention span').textContent.trim(),
- ).toContain('Something went wrong!');
+ expect(findRebaseMessageElText()).toContain('Something went wrong!');
done();
});
});
@@ -72,9 +70,7 @@ describe('Merge request widget rebase component', () => {
service: {},
});
- const text = vm.$el
- .querySelector('.rebase-state-find-class-convention span')
- .textContent.trim();
+ const text = findRebaseMessageElText();
expect(text).toContain('Fast-forward merge is not possible.');
expect(text).toContain('Rebase the source branch onto');
@@ -93,7 +89,7 @@ describe('Merge request widget rebase component', () => {
service: {},
});
- const elem = vm.$el.querySelector('.rebase-state-find-class-convention span');
+ const elem = findRebaseMessageEl();
expect(elem.innerHTML).toContain(
`Fast-forward merge is not possible. Rebase the source branch onto <span class="label-branch">${targetBranch}</span> to allow this merge request to be merged.`,
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
index 98af44b0975..aae9b8660e2 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -1,12 +1,12 @@
import { shallowMount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlButton } from '@gitlab/ui';
import AutoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
describe('MRWidgetAutoMergeFailed', () => {
let wrapper;
const mergeError = 'This is the merge error';
- const findButton = () => wrapper.find('button');
+ const findButton = () => wrapper.find(GlButton);
const createComponent = (props = {}) => {
wrapper = shallowMount(AutoMergeFailedComponent, {
@@ -38,17 +38,13 @@ describe('MRWidgetAutoMergeFailed', () => {
it('emits event and shows loading icon when button is clicked', () => {
jest.spyOn(eventHub, '$emit');
- findButton().trigger('click');
+ findButton().vm.$emit('click');
expect(eventHub.$emit.mock.calls[0][0]).toBe('MRWidgetUpdateRequested');
return wrapper.vm.$nextTick(() => {
- expect(findButton().attributes('disabled')).toEqual('disabled');
- expect(
- findButton()
- .find(GlLoadingIcon)
- .exists(),
- ).toBe(true);
+ expect(findButton().attributes('disabled')).toBe('true');
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index 5eb24315ca6..9057ffaea45 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -101,8 +101,6 @@ describe('ReadyToMerge', () => {
expect(vm.isMakingRequest).toBeFalsy();
expect(vm.isMergingImmediately).toBeFalsy();
expect(vm.commitMessage).toBe(vm.mr.commitMessage);
- expect(vm.successSvg).toBeDefined();
- expect(vm.warningSvg).toBeDefined();
});
});
@@ -494,19 +492,6 @@ describe('ReadyToMerge', () => {
});
});
- it('hides close button', done => {
- jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
- jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
-
- vm.handleMergePolling(() => {}, () => {});
-
- setImmediate(() => {
- expect(document.querySelector('.btn-close').classList.contains('hidden')).toBeTruthy();
-
- done();
- });
- });
-
it('updates merge request count badge', done => {
jest.spyOn(vm.service, 'poll').mockReturnValue(returnPromise('merged'));
jest.spyOn(vm, 'initiateRemoveSourceBranchPolling').mockImplementation(() => {});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
index 1711efb5512..13c0665f929 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
@@ -31,10 +31,7 @@ describe('DeploymentAction component', () => {
wrapper.destroy();
}
- wrapper = mount(DeploymentActions, {
- ...options,
- provide: { glFeatures: { deployFromFooter: true } },
- });
+ wrapper = mount(DeploymentActions, options);
};
const findStopButton = () => wrapper.find('.js-stop-env');
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
index ce395de3b5d..17d7fcc4bff 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
@@ -19,10 +19,7 @@ describe('Deployment component', () => {
if (wrapper && wrapper.destroy) {
wrapper.destroy();
}
- wrapper = mount(DeploymentComponent, {
- ...options,
- provide: { glFeatures: { deployFromFooter: true } },
- });
+ wrapper = mount(DeploymentComponent, options);
};
beforeEach(() => {
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index a2ade44b7c4..5fe8ff58d31 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import mountComponent from 'helpers/vue_mount_component_helper';
+import { withGonExperiment } from 'helpers/experimentation_helper';
import axios from '~/lib/utils/axios_utils';
import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -812,43 +813,61 @@ describe('mrWidgetOptions', () => {
});
});
- describe('given suggestPipeline feature flag is enabled', () => {
+ describe('suggestPipeline Experiment', () => {
beforeEach(() => {
mock.onAny().reply(200);
// This is needed because some grandchildren Bootstrap components throw warnings
// https://gitlab.com/gitlab-org/gitlab/issues/208458
jest.spyOn(console, 'warn').mockImplementation();
+ });
- gon.features = { suggestPipeline: true };
+ describe('given experiment is enabled', () => {
+ withGonExperiment('suggestPipeline');
- createComponent();
+ beforeEach(() => {
+ createComponent();
- vm.mr.hasCI = false;
- });
+ vm.mr.hasCI = false;
+ });
- it('should suggest pipelines when none exist', () => {
- expect(findSuggestPipeline()).toEqual(expect.any(Element));
- });
+ it('should suggest pipelines when none exist', () => {
+ expect(findSuggestPipeline()).toEqual(expect.any(Element));
+ });
- it.each([
- { isDismissedSuggestPipeline: true },
- { mergeRequestAddCiConfigPath: null },
- { hasCI: true },
- ])('with %s, should not suggest pipeline', async obj => {
- Object.assign(vm.mr, obj);
+ it.each([
+ { isDismissedSuggestPipeline: true },
+ { mergeRequestAddCiConfigPath: null },
+ { hasCI: true },
+ ])('with %s, should not suggest pipeline', async obj => {
+ Object.assign(vm.mr, obj);
- await vm.$nextTick();
+ await vm.$nextTick();
- expect(findSuggestPipeline()).toBeNull();
+ expect(findSuggestPipeline()).toBeNull();
+ });
+
+ it('should allow dismiss of the suggest pipeline message', async () => {
+ findSuggestPipelineButton().click();
+
+ await vm.$nextTick();
+
+ expect(findSuggestPipeline()).toBeNull();
+ });
});
- it('should allow dismiss of the suggest pipeline message', async () => {
- findSuggestPipelineButton().click();
+ describe('given suggestPipeline experiment is not enabled', () => {
+ withGonExperiment('suggestPipeline', false);
- await vm.$nextTick();
+ beforeEach(() => {
+ createComponent();
- expect(findSuggestPipeline()).toBeNull();
+ vm.mr.hasCI = false;
+ });
+
+ it('should not suggest pipelines when none exist', () => {
+ expect(findSuggestPipeline()).toBeNull();
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index dfd114a2d1c..ec4a81054db 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -39,6 +39,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="d-inline-flex"
data-clipboard-text="ssh://foo.bar"
@@ -80,6 +81,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
+ buttontextclasses=""
category="primary"
class="d-inline-flex"
data-clipboard-text="http://foo.bar"
diff --git a/spec/frontend/vue_shared/components/alert_detail_table_spec.js b/spec/frontend/vue_shared/components/alert_details_table_spec.js
index 9c38ccad8a7..dbdb7705d3c 100644
--- a/spec/frontend/vue_shared/components/alert_detail_table_spec.js
+++ b/spec/frontend/vue_shared/components/alert_details_table_spec.js
@@ -1,5 +1,5 @@
+import { GlLoadingIcon, GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { GlTable, GlLoadingIcon } from '@gitlab/ui';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
const mockAlert = {
@@ -14,6 +14,7 @@ const mockAlert = {
assignees: { nodes: [] },
notes: { nodes: [] },
todos: { nodes: [] },
+ __typename: 'AlertManagementAlert',
};
describe('AlertDetails', () => {
@@ -35,6 +36,8 @@ describe('AlertDetails', () => {
});
const findTableComponent = () => wrapper.find(GlTable);
+ const findTableKeys = () => findTableComponent().findAll('tbody td:first-child');
+ const findTableField = (fields, fieldName) => fields.filter(row => row.text() === fieldName);
describe('Alert details', () => {
describe('empty state', () => {
@@ -58,8 +61,10 @@ describe('AlertDetails', () => {
});
describe('with table data', () => {
+ const environment = 'myEnvironment';
+ const environmentUrl = 'fake/url';
beforeEach(() => {
- mountComponent();
+ mountComponent({ alert: { ...mockAlert, environment, environmentUrl } });
});
it('renders a table', () => {
@@ -69,6 +74,26 @@ describe('AlertDetails', () => {
it('renders a cell based on alert data', () => {
expect(findTableComponent().text()).toContain('SyntaxError: Invalid or unexpected token');
});
+
+ it('should show allowed alert fields', () => {
+ const fields = findTableKeys();
+
+ expect(findTableField(fields, 'Iid').exists()).toBe(true);
+ expect(findTableField(fields, 'Title').exists()).toBe(true);
+ expect(findTableField(fields, 'Severity').exists()).toBe(true);
+ expect(findTableField(fields, 'Status').exists()).toBe(true);
+ expect(findTableField(fields, 'Environment').exists()).toBe(true);
+ });
+
+ it('should not show disallowed alert fields', () => {
+ const fields = findTableKeys();
+
+ expect(findTableField(fields, 'Typename').exists()).toBe(false);
+ expect(findTableField(fields, 'Todos').exists()).toBe(false);
+ expect(findTableField(fields, 'Notes').exists()).toBe(false);
+ expect(findTableField(fields, 'Assignees').exists()).toBe(false);
+ expect(findTableField(fields, 'EnvironmentUrl').exists()).toBe(false);
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index 7f0b7ba8cf8..51a2653befc 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton, GlIcon } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('clipboard button', () => {
@@ -26,9 +26,8 @@ describe('clipboard button', () => {
});
it('renders a button for clipboard', () => {
- expect(wrapper.find(GlDeprecatedButton).exists()).toBe(true);
+ expect(wrapper.find(GlButton).exists()).toBe(true);
expect(wrapper.attributes('data-clipboard-text')).toBe('copy me');
- expect(wrapper.find(GlIcon).props('name')).toBe('copy-to-clipboard');
});
it('should have a tooltip with default values', () => {
diff --git a/spec/frontend/vue_shared/components/confirm_modal_spec.js b/spec/frontend/vue_shared/components/confirm_modal_spec.js
index 5d92af64de0..8456ca9d125 100644
--- a/spec/frontend/vue_shared/components/confirm_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_modal_spec.js
@@ -86,6 +86,22 @@ describe('vue_shared/components/confirm_modal', () => {
expect(findForm().element.submit).not.toHaveBeenCalled();
});
+ describe('with handleSubmit prop', () => {
+ const handleSubmit = jest.fn();
+ beforeEach(() => {
+ createComponent({ handleSubmit });
+ findModal().vm.$emit('primary');
+ });
+
+ it('will call handleSubmit', () => {
+ expect(handleSubmit).toHaveBeenCalled();
+ });
+
+ it('does not submit the form', () => {
+ expect(findForm().element.submit).not.toHaveBeenCalled();
+ });
+ });
+
describe('when modal submitted', () => {
beforeEach(() => {
findModal().vm.$emit('primary');
diff --git a/spec/frontend/vue_shared/components/local_storage_sync_spec.js b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
index 5470171a21e..3ff4c0917f2 100644
--- a/spec/frontend/vue_shared/components/local_storage_sync_spec.js
+++ b/spec/frontend/vue_shared/components/local_storage_sync_spec.js
@@ -12,7 +12,9 @@ describe('Local Storage Sync', () => {
};
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ }
wrapper = null;
localStorage.clear();
});
@@ -45,23 +47,23 @@ describe('Local Storage Sync', () => {
expect(wrapper.emitted('input')).toBeFalsy();
});
- it('saves updated value to localStorage', () => {
- createComponent({
- props: {
- storageKey,
- value: 'ascending',
- },
- });
-
- const newValue = 'descending';
- wrapper.setProps({
- value: newValue,
- });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(localStorage.getItem(storageKey)).toBe(newValue);
- });
- });
+ it.each(['foo', 3, true, ['foo', 'bar'], { foo: 'bar' }])(
+ 'saves updated value to localStorage',
+ newValue => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ },
+ });
+
+ wrapper.setProps({ value: newValue });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(localStorage.getItem(storageKey)).toBe(String(newValue));
+ });
+ },
+ );
it('does not save default value', () => {
const value = 'ascending';
@@ -125,4 +127,88 @@ describe('Local Storage Sync', () => {
});
});
});
+
+ describe('with "asJson" prop set to "true"', () => {
+ const storageKey = 'testStorageKey';
+
+ describe.each`
+ value | serializedValue
+ ${null} | ${'null'}
+ ${''} | ${'""'}
+ ${true} | ${'true'}
+ ${false} | ${'false'}
+ ${42} | ${'42'}
+ ${'42'} | ${'"42"'}
+ ${'{ foo: '} | ${'"{ foo: "'}
+ ${['test']} | ${'["test"]'}
+ ${{ foo: 'bar' }} | ${'{"foo":"bar"}'}
+ `('given $value', ({ value, serializedValue }) => {
+ describe('is a new value', () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ asJson: true,
+ },
+ });
+
+ wrapper.setProps({ value });
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('serializes the value correctly to localStorage', () => {
+ expect(localStorage.getItem(storageKey)).toBe(serializedValue);
+ });
+ });
+
+ describe('is already stored', () => {
+ beforeEach(() => {
+ localStorage.setItem(storageKey, serializedValue);
+
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ asJson: true,
+ },
+ });
+ });
+
+ it('emits an input event with the deserialized value', () => {
+ expect(wrapper.emitted('input')).toEqual([[value]]);
+ });
+ });
+ });
+
+ describe('with bad JSON in storage', () => {
+ const badJSON = '{ badJSON';
+
+ beforeEach(() => {
+ jest.spyOn(console, 'warn').mockImplementation();
+ localStorage.setItem(storageKey, badJSON);
+
+ createComponent({
+ props: {
+ storageKey,
+ value: 'initial',
+ asJson: true,
+ },
+ });
+ });
+
+ it('logs a warning to the console', () => {
+ // eslint-disable-next-line no-console
+ expect(console.warn).toHaveBeenCalledWith(
+ `[gitlab] Failed to deserialize value from localStorage (key=${storageKey})`,
+ badJSON,
+ );
+ });
+
+ it('should not emit an input event', () => {
+ expect(wrapper.emitted('input')).toBeUndefined();
+ });
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
index cdd7a3ccaf0..b8a9143bc79 100644
--- a/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
+++ b/spec/frontend/vue_shared/components/markdown/__snapshots__/suggestion_diff_spec.js.snap
@@ -10,6 +10,7 @@ exports[`Suggestion Diff component matches snapshot 1`] = `
helppagepath="path_to_docs"
isapplyingbatch="true"
isbatched="true"
+ suggestionscount="0"
/>
<table
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index 3da0a35f05a..f1ead33ec68 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -7,6 +7,7 @@ import axios from '~/lib/utils/axios_utils';
const markdownPreviewPath = `${TEST_HOST}/preview`;
const markdownDocsPath = `${TEST_HOST}/docs`;
+const textareaValue = 'testing\n123';
function assertMarkdownTabs(isWrite, writeLink, previewLink, wrapper) {
expect(writeLink.element.parentNode.classList.contains('active')).toBe(isWrite);
@@ -20,23 +21,11 @@ function createComponent() {
markdownDocsPath,
markdownPreviewPath,
isSubmitting: false,
+ textareaValue,
},
slots: {
- textarea: '<textarea>testing\n123</textarea>',
+ textarea: `<textarea>${textareaValue}</textarea>`,
},
- template: `
- <field-component
- markdown-preview-path="${markdownPreviewPath}"
- markdown-docs-path="${markdownDocsPath}"
- :isSubmitting="false"
- >
- <textarea
- slot="textarea"
- v-model="text">
- <slot>this is a test</slot>
- </textarea>
- </field-component>
- `,
});
return wrapper;
}
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index a521668b15c..b19e74b5b11 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -57,7 +57,9 @@ describe('Suggestion Diff component', () => {
});
it('renders apply suggestion and add to batch buttons', () => {
- createComponent();
+ createComponent({
+ suggestionsCount: 2,
+ });
const applyBtn = findApplyButton();
const addToBatchBtn = findAddToBatchButton();
@@ -104,7 +106,9 @@ describe('Suggestion Diff component', () => {
describe('when add to batch is clicked', () => {
it('emits addToBatch', () => {
- createComponent();
+ createComponent({
+ suggestionsCount: 2,
+ });
findAddToBatchButton().vm.$emit('click');
diff --git a/spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js b/spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js
new file mode 100644
index 00000000000..d6f5773295c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/avatars/group_avatar_spec.js
@@ -0,0 +1,46 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { getByText as getByTextHelper } from '@testing-library/dom';
+import { GlAvatarLink } from '@gitlab/ui';
+import { group as member } from '../mock_data';
+import GroupAvatar from '~/vue_shared/components/members/avatars/group_avatar.vue';
+
+describe('GroupAvatar', () => {
+ let wrapper;
+
+ const group = member.sharedWithGroup;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(GroupAvatar, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders link to group', () => {
+ const link = wrapper.find(GlAvatarLink);
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(group.webUrl);
+ });
+
+ it("renders group's full name", () => {
+ expect(getByText(group.fullName).exists()).toBe(true);
+ });
+
+ it("renders group's avatar", () => {
+ expect(wrapper.find('img').attributes('src')).toBe(group.avatarUrl);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js b/spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js
new file mode 100644
index 00000000000..7948da7eb40
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/avatars/invite_avatar_spec.js
@@ -0,0 +1,38 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { getByText as getByTextHelper } from '@testing-library/dom';
+import { invite as member } from '../mock_data';
+import InviteAvatar from '~/vue_shared/components/members/avatars/invite_avatar.vue';
+
+describe('InviteAvatar', () => {
+ let wrapper;
+
+ const { invite } = member;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(InviteAvatar, {
+ propsData: {
+ member,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders email as name', () => {
+ expect(getByText(invite.email).exists()).toBe(true);
+ });
+
+ it('renders avatar', () => {
+ expect(wrapper.find('img').attributes('src')).toBe(invite.avatarUrl);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js b/spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js
new file mode 100644
index 00000000000..6c0ba8afede
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/avatars/user_avatar_spec.js
@@ -0,0 +1,85 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { GlAvatarLink, GlBadge } from '@gitlab/ui';
+import { member as memberMock, orphanedMember } from '../mock_data';
+import UserAvatar from '~/vue_shared/components/members/avatars/user_avatar.vue';
+
+describe('UserAvatar', () => {
+ let wrapper;
+
+ const { user } = memberMock;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(UserAvatar, {
+ propsData: {
+ member: memberMock,
+ isCurrentUser: false,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(within(wrapper.element).findByText(text, options));
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it("renders link to user's profile", () => {
+ createComponent();
+
+ const link = wrapper.find(GlAvatarLink);
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes()).toMatchObject({
+ href: user.webUrl,
+ 'data-user-id': `${user.id}`,
+ 'data-username': user.username,
+ });
+ });
+
+ it("renders user's name", () => {
+ createComponent();
+
+ expect(getByText(user.name).exists()).toBe(true);
+ });
+
+ it("renders user's username", () => {
+ createComponent();
+
+ expect(getByText(`@${user.username}`).exists()).toBe(true);
+ });
+
+ it("renders user's avatar", () => {
+ createComponent();
+
+ expect(wrapper.find('img').attributes('src')).toBe(user.avatarUrl);
+ });
+
+ describe('when user property does not exist', () => {
+ it('displays an orphaned user', () => {
+ createComponent({ member: orphanedMember });
+
+ expect(getByText('Orphaned member').exists()).toBe(true);
+ });
+ });
+
+ describe('badges', () => {
+ it.each`
+ member | badgeText
+ ${{ ...memberMock, user: { ...memberMock.user, blocked: true } }} | ${'Blocked'}
+ ${{ ...memberMock, user: { ...memberMock.user, twoFactorEnabled: true } }} | ${'2FA'}
+ `('renders the "$badgeText" badge', ({ member, badgeText }) => {
+ createComponent({ member });
+
+ expect(wrapper.find(GlBadge).text()).toBe(badgeText);
+ });
+
+ it('renders the "It\'s you" badge when member is current user', () => {
+ createComponent({ isCurrentUser: true });
+
+ expect(getByText("It's you").exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/mock_data.js b/spec/frontend/vue_shared/components/members/mock_data.js
new file mode 100644
index 00000000000..3195f04f202
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/mock_data.js
@@ -0,0 +1,61 @@
+export const member = {
+ requestedAt: null,
+ canUpdate: false,
+ canRemove: false,
+ canOverride: false,
+ accessLevel: { integerValue: 50, stringValue: 'Owner' },
+ source: {
+ id: 178,
+ name: 'Foo Bar',
+ webUrl: 'https://gitlab.com/groups/foo-bar',
+ },
+ user: {
+ id: 123,
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'https://gitlab.com/root',
+ avatarUrl: 'https://www.gravatar.com/avatar/4816142ef496f956a277bedf1a40607b?s=80&d=identicon',
+ blocked: false,
+ twoFactorEnabled: false,
+ },
+ id: 238,
+ createdAt: '2020-07-17T16:22:46.923Z',
+ expiresAt: null,
+ usingLicense: false,
+ groupSso: false,
+ groupManagedAccount: false,
+};
+
+export const group = {
+ accessLevel: { integerValue: 10, stringValue: 'Guest' },
+ sharedWithGroup: {
+ id: 24,
+ name: 'Commit451',
+ avatarUrl: '/uploads/-/system/user/avatar/1/avatar.png?width=40',
+ fullPath: 'parent-group/commit451',
+ fullName: 'Parent group / Commit451',
+ webUrl: 'https://gitlab.com/groups/parent-group/commit451',
+ },
+ id: 3,
+ createdAt: '2020-08-06T15:31:07.662Z',
+ expiresAt: null,
+};
+
+const { user, ...memberNoUser } = member;
+export const invite = {
+ ...memberNoUser,
+ invite: {
+ email: 'jewel@hudsonwalter.biz',
+ avatarUrl: 'https://www.gravatar.com/avatar/cbab7510da7eec2f60f638261b05436d?s=80&d=identicon',
+ canResend: true,
+ },
+};
+
+export const orphanedMember = memberNoUser;
+
+export const accessRequest = {
+ ...member,
+ requestedAt: '2020-07-17T16:22:46.923Z',
+};
+
+export const members = [member];
diff --git a/spec/frontend/vue_shared/components/members/table/created_at_spec.js b/spec/frontend/vue_shared/components/members/table/created_at_spec.js
new file mode 100644
index 00000000000..cf3821baf44
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/created_at_spec.js
@@ -0,0 +1,61 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { useFakeDate } from 'helpers/fake_date';
+import CreatedAt from '~/vue_shared/components/members/table/created_at.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+describe('CreatedAt', () => {
+ // March 15th, 2020
+ useFakeDate(2020, 2, 15);
+
+ const date = '2020-03-01T00:00:00.000';
+ const dateTimeAgo = '2 weeks ago';
+
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = mount(CreatedAt, {
+ propsData: {
+ date,
+ ...propsData,
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(within(wrapper.element).getByText(text, options));
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('created at text', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('displays created at text', () => {
+ expect(getByText(dateTimeAgo).exists()).toBe(true);
+ });
+
+ it('uses `TimeAgoTooltip` component to display tooltip', () => {
+ expect(wrapper.find(TimeAgoTooltip).exists()).toBe(true);
+ });
+ });
+
+ describe('when `createdBy` prop is provided', () => {
+ it('displays a link to the user that created the member', () => {
+ createComponent({
+ createdBy: {
+ name: 'Administrator',
+ webUrl: 'https://gitlab.com/root',
+ },
+ });
+
+ const link = getByText('Administrator');
+
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe('https://gitlab.com/root');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/expires_at_spec.js b/spec/frontend/vue_shared/components/members/table/expires_at_spec.js
new file mode 100644
index 00000000000..95ae251b0fd
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/expires_at_spec.js
@@ -0,0 +1,86 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { within } from '@testing-library/dom';
+import { useFakeDate } from 'helpers/fake_date';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import ExpiresAt from '~/vue_shared/components/members/table/expires_at.vue';
+
+describe('ExpiresAt', () => {
+ // March 15th, 2020
+ useFakeDate(2020, 2, 15);
+
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = mount(ExpiresAt, {
+ propsData,
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(within(wrapper.element).getByText(text, options));
+
+ const getTooltipDirective = elementWrapper => getBinding(elementWrapper.element, 'gl-tooltip');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when no expiration date is set', () => {
+ it('displays "No expiration set"', () => {
+ createComponent({ date: null });
+
+ expect(getByText('No expiration set').exists()).toBe(true);
+ });
+ });
+
+ describe('when expiration date is in the past', () => {
+ let expiredText;
+
+ beforeEach(() => {
+ createComponent({ date: '2019-03-15T00:00:00.000' });
+
+ expiredText = getByText('Expired');
+ });
+
+ it('displays "Expired"', () => {
+ expect(expiredText.exists()).toBe(true);
+ expect(expiredText.classes()).toContain('gl-text-red-500');
+ });
+
+ it('displays tooltip with formatted date', () => {
+ const tooltipDirective = getTooltipDirective(expiredText);
+
+ expect(tooltipDirective).not.toBeUndefined();
+ expect(expiredText.attributes('title')).toBe('Mar 15, 2019 12:00am GMT+0000');
+ });
+ });
+
+ describe('when expiration date is in the future', () => {
+ it.each`
+ date | expected | warningColor
+ ${'2020-03-23T00:00:00.000'} | ${'in 8 days'} | ${false}
+ ${'2020-03-20T00:00:00.000'} | ${'in 5 days'} | ${true}
+ ${'2020-03-16T00:00:00.000'} | ${'in 1 day'} | ${true}
+ ${'2020-03-15T05:00:00.000'} | ${'in about 5 hours'} | ${true}
+ ${'2020-03-15T01:00:00.000'} | ${'in about 1 hour'} | ${true}
+ ${'2020-03-15T00:30:00.000'} | ${'in 30 minutes'} | ${true}
+ ${'2020-03-15T00:01:15.000'} | ${'in 1 minute'} | ${true}
+ ${'2020-03-15T00:00:15.000'} | ${'in less than a minute'} | ${true}
+ `('displays "$expected"', ({ date, expected, warningColor }) => {
+ createComponent({ date });
+
+ const expiredText = getByText(expected);
+
+ expect(expiredText.exists()).toBe(true);
+
+ if (warningColor) {
+ expect(expiredText.classes()).toContain('gl-text-orange-500');
+ } else {
+ expect(expiredText.classes()).not.toContain('gl-text-orange-500');
+ }
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_avatar_spec.js b/spec/frontend/vue_shared/components/members/table/member_avatar_spec.js
new file mode 100644
index 00000000000..a171dd830c1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_avatar_spec.js
@@ -0,0 +1,39 @@
+import { shallowMount } from '@vue/test-utils';
+import { MEMBER_TYPES } from '~/vue_shared/components/members/constants';
+import { member as memberMock, group, invite, accessRequest } from '../mock_data';
+import MemberAvatar from '~/vue_shared/components/members/table/member_avatar.vue';
+import UserAvatar from '~/vue_shared/components/members/avatars/user_avatar.vue';
+import GroupAvatar from '~/vue_shared/components/members/avatars/group_avatar.vue';
+import InviteAvatar from '~/vue_shared/components/members/avatars/invite_avatar.vue';
+
+describe('MemberAvatar', () => {
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(MemberAvatar, {
+ propsData: {
+ isCurrentUser: false,
+ ...propsData,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ test.each`
+ memberType | member | expectedComponent | expectedComponentName
+ ${MEMBER_TYPES.user} | ${memberMock} | ${UserAvatar} | ${'UserAvatar'}
+ ${MEMBER_TYPES.group} | ${group} | ${GroupAvatar} | ${'GroupAvatar'}
+ ${MEMBER_TYPES.invite} | ${invite} | ${InviteAvatar} | ${'InviteAvatar'}
+ ${MEMBER_TYPES.accessRequest} | ${accessRequest} | ${UserAvatar} | ${'UserAvatar'}
+ `(
+ 'renders $expectedComponentName when `memberType` is $memberType',
+ ({ memberType, member, expectedComponent }) => {
+ createComponent({ memberType, member });
+
+ expect(wrapper.find(expectedComponent).exists()).toBe(true);
+ },
+ );
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_source_spec.js b/spec/frontend/vue_shared/components/members/table/member_source_spec.js
new file mode 100644
index 00000000000..8b914d76674
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_source_spec.js
@@ -0,0 +1,71 @@
+import { mount, createWrapper } from '@vue/test-utils';
+import { getByText as getByTextHelper } from '@testing-library/dom';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import MemberSource from '~/vue_shared/components/members/table/member_source.vue';
+
+describe('MemberSource', () => {
+ let wrapper;
+
+ const createComponent = propsData => {
+ wrapper = mount(MemberSource, {
+ propsData: {
+ memberSource: {
+ id: 102,
+ name: 'Foo bar',
+ webUrl: 'https://gitlab.com/groups/foo-bar',
+ },
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ const getTooltipDirective = elementWrapper => getBinding(elementWrapper.element, 'gl-tooltip');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('direct member', () => {
+ it('displays "Direct member"', () => {
+ createComponent({
+ isDirectMember: true,
+ });
+
+ expect(getByText('Direct member').exists()).toBe(true);
+ });
+ });
+
+ describe('inherited member', () => {
+ let sourceGroupLink;
+
+ beforeEach(() => {
+ createComponent({
+ isDirectMember: false,
+ });
+
+ sourceGroupLink = getByText('Foo bar');
+ });
+
+ it('displays a link to source group', () => {
+ createComponent({
+ isDirectMember: false,
+ });
+
+ expect(sourceGroupLink.exists()).toBe(true);
+ expect(sourceGroupLink.attributes('href')).toBe('https://gitlab.com/groups/foo-bar');
+ });
+
+ it('displays tooltip with "Inherited"', () => {
+ const tooltipDirective = getTooltipDirective(sourceGroupLink);
+
+ expect(tooltipDirective).not.toBeUndefined();
+ expect(sourceGroupLink.attributes('title')).toBe('Inherited');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js b/spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js
new file mode 100644
index 00000000000..960d9bc164c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/member_table_cell_spec.js
@@ -0,0 +1,130 @@
+import { mount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { MEMBER_TYPES } from '~/vue_shared/components/members/constants';
+import { member as memberMock, group, invite, accessRequest } from '../mock_data';
+import MembersTableCell from '~/vue_shared/components/members/table/members_table_cell.vue';
+
+describe('MembersTableCell', () => {
+ const WrappedComponent = {
+ props: {
+ memberType: {
+ type: String,
+ required: true,
+ },
+ isDirectMember: {
+ type: Boolean,
+ required: true,
+ },
+ isCurrentUser: {
+ type: Boolean,
+ required: true,
+ },
+ },
+ render(createElement) {
+ return createElement('div', this.memberType);
+ },
+ };
+
+ const localVue = createLocalVue();
+ localVue.use(Vuex);
+ localVue.component('wrapped-component', WrappedComponent);
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ sourceId: 1,
+ currentUserId: 1,
+ ...state,
+ },
+ });
+ };
+
+ let wrapper;
+
+ const createComponent = (propsData, state = {}) => {
+ wrapper = mount(MembersTableCell, {
+ localVue,
+ propsData,
+ store: createStore(state),
+ scopedSlots: {
+ default: `
+ <wrapped-component
+ :member-type="props.memberType"
+ :is-direct-member="props.isDirectMember"
+ :is-current-user="props.isCurrentUser"
+ />
+ `,
+ },
+ });
+ };
+
+ const findWrappedComponent = () => wrapper.find(WrappedComponent);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ test.each`
+ member | expectedMemberType
+ ${memberMock} | ${MEMBER_TYPES.user}
+ ${group} | ${MEMBER_TYPES.group}
+ ${invite} | ${MEMBER_TYPES.invite}
+ ${accessRequest} | ${MEMBER_TYPES.accessRequest}
+ `(
+ 'sets scoped slot prop `memberType` to $expectedMemberType',
+ ({ member, expectedMemberType }) => {
+ createComponent({ member });
+
+ expect(findWrappedComponent().props('memberType')).toBe(expectedMemberType);
+ },
+ );
+
+ describe('isDirectMember', () => {
+ it('returns `true` when member source has same ID as `sourceId`', () => {
+ createComponent({
+ member: {
+ ...memberMock,
+ source: {
+ ...memberMock.source,
+ id: 1,
+ },
+ },
+ });
+
+ expect(findWrappedComponent().props('isDirectMember')).toBe(true);
+ });
+
+ it('returns `false` when member is inherited', () => {
+ createComponent({
+ member: memberMock,
+ });
+
+ expect(findWrappedComponent().props('isDirectMember')).toBe(false);
+ });
+ });
+
+ describe('isCurrentUser', () => {
+ it('returns `true` when `member.user` has the same ID as `currentUserId`', () => {
+ createComponent({
+ member: {
+ ...memberMock,
+ user: {
+ ...memberMock.user,
+ id: 1,
+ },
+ },
+ });
+
+ expect(findWrappedComponent().props('isCurrentUser')).toBe(true);
+ });
+
+ it('returns `false` when `member.user` does not have the same ID as `currentUserId`', () => {
+ createComponent({
+ member: memberMock,
+ });
+
+ expect(findWrappedComponent().props('isCurrentUser')).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/table/members_table_spec.js b/spec/frontend/vue_shared/components/members/table/members_table_spec.js
new file mode 100644
index 00000000000..4979a7096ac
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/table/members_table_spec.js
@@ -0,0 +1,104 @@
+import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
+import Vuex from 'vuex';
+import {
+ getByText as getByTextHelper,
+ getByTestId as getByTestIdHelper,
+} from '@testing-library/dom';
+import MembersTable from '~/vue_shared/components/members/table/members_table.vue';
+import MemberAvatar from '~/vue_shared/components/members/table/member_avatar.vue';
+import MemberSource from '~/vue_shared/components/members/table/member_source.vue';
+import ExpiresAt from '~/vue_shared/components/members/table/expires_at.vue';
+import CreatedAt from '~/vue_shared/components/members/table/created_at.vue';
+import * as initUserPopovers from '~/user_popovers';
+import { member as memberMock, invite, accessRequest } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('MembersTable', () => {
+ let wrapper;
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ state: {
+ members: [],
+ tableFields: [],
+ ...state,
+ },
+ });
+ };
+
+ const createComponent = state => {
+ wrapper = mount(MembersTable, {
+ localVue,
+ store: createStore(state),
+ stubs: ['member-avatar', 'member-source', 'expires-at', 'created-at'],
+ });
+ };
+
+ const getByText = (text, options) =>
+ createWrapper(getByTextHelper(wrapper.element, text, options));
+
+ const getByTestId = (id, options) =>
+ createWrapper(getByTestIdHelper(wrapper.element, id, options));
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('fields', () => {
+ it.each`
+ field | label | member | expectedComponent
+ ${'account'} | ${'Account'} | ${memberMock} | ${MemberAvatar}
+ ${'source'} | ${'Source'} | ${memberMock} | ${MemberSource}
+ ${'granted'} | ${'Access granted'} | ${memberMock} | ${CreatedAt}
+ ${'invited'} | ${'Invited'} | ${invite} | ${CreatedAt}
+ ${'requested'} | ${'Requested'} | ${accessRequest} | ${CreatedAt}
+ ${'expires'} | ${'Access expires'} | ${memberMock} | ${ExpiresAt}
+ ${'maxRole'} | ${'Max role'} | ${memberMock} | ${null}
+ ${'expiration'} | ${'Expiration'} | ${memberMock} | ${null}
+ `('renders the $label field', ({ field, label, member, expectedComponent }) => {
+ createComponent({
+ members: [member],
+ tableFields: [field],
+ });
+
+ expect(getByText(label, { selector: '[role="columnheader"]' }).exists()).toBe(true);
+
+ if (expectedComponent) {
+ expect(
+ wrapper
+ .find(`[data-label="${label}"][role="cell"]`)
+ .find(expectedComponent)
+ .exists(),
+ ).toBe(true);
+ }
+ });
+
+ it('renders "Actions" field for screen readers', () => {
+ createComponent({ tableFields: ['actions'] });
+
+ const actionField = getByTestId('col-actions');
+
+ expect(actionField.exists()).toBe(true);
+ expect(actionField.classes('gl-sr-only')).toBe(true);
+ });
+ });
+
+ describe('when `members` is an empty array', () => {
+ it('displays a "No members found" message', () => {
+ createComponent();
+
+ expect(getByText('No members found').exists()).toBe(true);
+ });
+ });
+
+ it('initializes user popovers when mounted', () => {
+ const initUserPopoversMock = jest.spyOn(initUserPopovers, 'default');
+
+ createComponent();
+
+ expect(initUserPopoversMock).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/vue_shared/components/members/utils_spec.js b/spec/frontend/vue_shared/components/members/utils_spec.js
new file mode 100644
index 00000000000..f183abc08d6
--- /dev/null
+++ b/spec/frontend/vue_shared/components/members/utils_spec.js
@@ -0,0 +1,29 @@
+import { generateBadges } from '~/vue_shared/components/members/utils';
+import { member as memberMock } from './mock_data';
+
+describe('Members Utils', () => {
+ describe('generateBadges', () => {
+ it('has correct properties for each badge', () => {
+ const badges = generateBadges(memberMock, true);
+
+ badges.forEach(badge => {
+ expect(badge).toEqual(
+ expect.objectContaining({
+ show: expect.any(Boolean),
+ text: expect.any(String),
+ variant: expect.stringMatching(/muted|neutral|info|success|danger|warning/),
+ }),
+ );
+ });
+ });
+
+ it.each`
+ member | expected
+ ${memberMock} | ${{ show: true, text: "It's you", variant: 'success' }}
+ ${{ ...memberMock, user: { ...memberMock.user, blocked: true } }} | ${{ show: true, text: 'Blocked', variant: 'danger' }}
+ ${{ ...memberMock, user: { ...memberMock.user, twoFactorEnabled: true } }} | ${{ show: true, text: '2FA', variant: 'info' }}
+ `('returns expected output for "$expected.text" badge', ({ member, expected }) => {
+ expect(generateBadges(member, true)).toContainEqual(expect.objectContaining(expected));
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap b/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
index 16094a42668..27276faf333 100644
--- a/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
+++ b/spec/frontend/vue_shared/components/registry/__snapshots__/code_instruction_spec.js.snap
@@ -38,7 +38,8 @@ exports[`Package code instruction single line to match the default snapshot 1`]
data-testid="instruction-button"
>
<button
- class="btn input-group-text btn-secondary btn-md btn-default"
+ aria-label="Copy this value"
+ class="btn input-group-text btn-default btn-md gl-button btn-default-secondary btn-icon"
data-clipboard-text="npm i @my-package"
title="Copy npm install command"
type="button"
@@ -53,6 +54,8 @@ exports[`Package code instruction single line to match the default snapshot 1`]
href="#copy-to-clipboard"
/>
</svg>
+
+ <!---->
</button>
</span>
</div>
diff --git a/spec/frontend/vue_shared/components/registry/title_area_spec.js b/spec/frontend/vue_shared/components/registry/title_area_spec.js
index 6740d6097a4..a513f178f45 100644
--- a/spec/frontend/vue_shared/components/registry/title_area_spec.js
+++ b/spec/frontend/vue_shared/components/registry/title_area_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatar } from '@gitlab/ui';
+import { GlAvatar, GlSprintf, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import component from '~/vue_shared/components/registry/title_area.vue';
@@ -10,10 +10,12 @@ describe('title area', () => {
const findMetadataSlot = name => wrapper.find(`[data-testid="${name}"]`);
const findTitle = () => wrapper.find('[data-testid="title"]');
const findAvatar = () => wrapper.find(GlAvatar);
+ const findInfoMessages = () => wrapper.findAll('[data-testid="info-message"]');
const mountComponent = ({ propsData = { title: 'foo' }, slots } = {}) => {
wrapper = shallowMount(component, {
propsData,
+ stubs: { GlSprintf },
slots: {
'sub-header': '<div data-testid="sub-header" />',
'right-actions': '<div data-testid="right-actions" />',
@@ -95,4 +97,33 @@ describe('title area', () => {
});
});
});
+
+ describe('info-messages', () => {
+ it('shows a message when the props contains one', () => {
+ mountComponent({ propsData: { infoMessages: [{ text: 'foo foo bar bar' }] } });
+
+ const messages = findInfoMessages();
+ expect(messages).toHaveLength(1);
+ expect(messages.at(0).text()).toBe('foo foo bar bar');
+ });
+
+ it('shows a link when the props contains one', () => {
+ mountComponent({
+ propsData: {
+ infoMessages: [{ text: 'foo %{docLinkStart}link%{docLinkEnd}', link: 'bar' }],
+ },
+ });
+
+ const message = findInfoMessages().at(0);
+
+ expect(message.find(GlLink).attributes('href')).toBe('bar');
+ expect(message.text()).toBe('foo link');
+ });
+
+ it('renders multiple spans for multiple messages', () => {
+ mountComponent({ propsData: { infoMessages: [{ text: 'foo' }, { text: 'bar' }] } });
+
+ expect(findInfoMessages()).toHaveLength(2);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
index 16f60b5ff21..81d31a284df 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
@@ -9,9 +9,11 @@ import {
} from '~/vue_shared/components/rich_content_editor/services/editor_service';
import buildHTMLToMarkdownRenderer from '~/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer';
import buildCustomRenderer from '~/vue_shared/components/rich_content_editor/services/build_custom_renderer';
+import sanitizeHTML from '~/vue_shared/components/rich_content_editor/services/sanitize_html';
jest.mock('~/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer');
jest.mock('~/vue_shared/components/rich_content_editor/services/build_custom_renderer');
+jest.mock('~/vue_shared/components/rich_content_editor/services/sanitize_html');
describe('Editor Service', () => {
let mockInstance;
@@ -143,5 +145,14 @@ describe('Editor Service', () => {
getEditorOptions(externalOptions);
expect(buildCustomRenderer).toHaveBeenCalledWith(externalOptions.customRenderers);
});
+
+ it('uses the internal sanitizeHTML service for HTML sanitization', () => {
+ const options = getEditorOptions();
+ const html = '<div></div>';
+
+ options.customHTMLSanitizer(html);
+
+ expect(sanitizeHTML).toHaveBeenCalledWith(html);
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js
index a6c712eeb31..b31684a400e 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_html_block_spec.js
@@ -1,22 +1,21 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_html_block';
import { buildUneditableHtmlAsTextTokens } from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
-import { normalTextNode } from './mock_data';
+describe('rich_content_editor/services/renderers/render_html_block', () => {
+ const htmlBlockNode = {
+ literal: '<div><h1>Heading</h1><p>Paragraph.</p></div>',
+ type: 'htmlBlock',
+ };
-const htmlBlockNode = {
- firstChild: null,
- literal: '<div><h1>Heading</h1><p>Paragraph.</p></div>',
- type: 'htmlBlock',
-};
-
-describe('Render HTML renderer', () => {
describe('canRender', () => {
- it('should return true when the argument is an html block', () => {
- expect(renderer.canRender(htmlBlockNode)).toBe(true);
- });
-
- it('should return false when the argument is not an html block', () => {
- expect(renderer.canRender(normalTextNode)).toBe(false);
+ it.each`
+ input | result
+ ${htmlBlockNode} | ${true}
+ ${{ literal: '<iframe></iframe>', type: 'htmlBlock' }} | ${true}
+ ${{ literal: '<iframe src="https://www.youtube.com"></iframe>', type: 'htmlBlock' }} | ${false}
+ ${{ literal: '<iframe></iframe>', type: 'text' }} | ${false}
+ `('returns $result when input=$input', ({ input, result }) => {
+ expect(renderer.canRender(input)).toBe(result);
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js
new file mode 100644
index 00000000000..f2182ef60d7
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/sanitize_html_spec.js
@@ -0,0 +1,11 @@
+import sanitizeHTML from '~/vue_shared/components/rich_content_editor/services/sanitize_html';
+
+describe('rich_content_editor/services/sanitize_html', () => {
+ it.each`
+ input | result
+ ${'<iframe src="https://www.youtube.com"></iframe>'} | ${'<iframe src="https://www.youtube.com"></iframe>'}
+ ${'<iframe src="https://gitlab.com"></iframe>'} | ${''}
+ `('only removes iframes whose source origin is not allowed', ({ input, result }) => {
+ expect(sanitizeHTML(input)).toBe(result);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 589be0ad7a4..a9350bc059d 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -69,6 +69,16 @@ describe('DropdownContentsLabelsView', () => {
expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
});
+ it('returns matching labels with fuzzy filtering', () => {
+ wrapper.setData({
+ searchKey: 'bg',
+ });
+
+ expect(wrapper.vm.visibleLabels.length).toBe(2);
+ expect(wrapper.vm.visibleLabels[0].title).toBe('Bug');
+ expect(wrapper.vm.visibleLabels[1].title).toBe('Boog');
+ });
+
it('returns all labels when `searchKey` is empty', () => {
wrapper.setData({
searchKey: '',
@@ -133,6 +143,19 @@ describe('DropdownContentsLabelsView', () => {
expect(wrapper.vm.currentHighlightItem).toBe(2);
});
+ it('resets the search text when the Enter key is pressed', () => {
+ wrapper.setData({
+ currentHighlightItem: 1,
+ searchKey: 'bug',
+ });
+
+ wrapper.vm.handleKeyDown({
+ keyCode: ENTER_KEY_CODE,
+ });
+
+ expect(wrapper.vm.searchKey).toBe('');
+ });
+
it('calls action `updateSelectedLabels` with currently highlighted label when Enter key is pressed', () => {
jest.spyOn(wrapper.vm, 'updateSelectedLabels').mockImplementation();
wrapper.setData({
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
index e1008d13fc2..9697d6c30f2 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/mock_data.js
@@ -24,6 +24,13 @@ export const mockLabels = [
color: '#FF0000',
textColor: '#FFFFFF',
},
+ {
+ id: 29,
+ title: 'Boog',
+ description: 'Label for bugs',
+ color: '#FF0000',
+ textColor: '#FFFFFF',
+ },
];
export const mockConfig = {
diff --git a/spec/frontend/vue_shared/components/todo_button_spec.js b/spec/frontend/vue_shared/components/todo_button_spec.js
index 482b5de11f6..1f8a214d632 100644
--- a/spec/frontend/vue_shared/components/todo_button_spec.js
+++ b/spec/frontend/vue_shared/components/todo_button_spec.js
@@ -33,7 +33,7 @@ describe('Todo Button', () => {
it.each`
label | isTodo
${'Mark as done'} | ${true}
- ${'Add a To-Do'} | ${false}
+ ${'Add a To Do'} | ${false}
`('sets correct label when isTodo is $isTodo', ({ label, isTodo }) => {
createComponent({ isTodo });
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 57f511903d9..5532a27b767 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -60,6 +60,7 @@ describe('Web IDE link component', () => {
it.each`
props | expectedActions
${{}} | ${[ACTION_WEB_IDE]}
+ ${{ webIdeIsFork: true }} | ${[{ ...ACTION_WEB_IDE, text: 'Edit fork in Web IDE' }]}
${{ needsToFork: true }} | ${[ACTION_WEB_IDE_FORK]}
${{ showWebIdeButton: false, showGitpodButton: true, gitpodEnabled: true }} | ${[ACTION_GITPOD]}
${{ showWebIdeButton: false, showGitpodButton: true, gitpodEnabled: false }} | ${[ACTION_GITPOD_ENABLE]}
diff --git a/spec/frontend/vue_shared/droplab_dropdown_button_spec.js b/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
deleted file mode 100644
index e57c730ecee..00000000000
--- a/spec/frontend/vue_shared/droplab_dropdown_button_spec.js
+++ /dev/null
@@ -1,132 +0,0 @@
-import { mount } from '@vue/test-utils';
-
-import DroplabDropdownButton from '~/vue_shared/components/droplab_dropdown_button.vue';
-
-const mockActions = [
- {
- title: 'Foo',
- description: 'Some foo action',
- },
- {
- title: 'Bar',
- description: 'Some bar action',
- },
-];
-
-const createComponent = ({
- size = '',
- dropdownClass = '',
- actions = mockActions,
- defaultAction = 0,
-}) =>
- mount(DroplabDropdownButton, {
- propsData: {
- size,
- dropdownClass,
- actions,
- defaultAction,
- },
- });
-
-describe('DroplabDropdownButton', () => {
- let wrapper;
-
- beforeEach(() => {
- wrapper = createComponent({});
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('data', () => {
- it('contains `selectedAction` representing value of `defaultAction` prop', () => {
- expect(wrapper.vm.selectedAction).toBe(0);
- });
- });
-
- describe('computed', () => {
- describe('selectedActionTitle', () => {
- it('returns string containing title of selected action', () => {
- wrapper.setData({ selectedAction: 0 });
-
- expect(wrapper.vm.selectedActionTitle).toBe(mockActions[0].title);
-
- wrapper.setData({ selectedAction: 1 });
-
- expect(wrapper.vm.selectedActionTitle).toBe(mockActions[1].title);
- });
- });
-
- describe('buttonSizeClass', () => {
- it('returns string containing button sizing class based on `size` prop', done => {
- const wrapperWithSize = createComponent({
- size: 'sm',
- });
-
- wrapperWithSize.vm.$nextTick(() => {
- expect(wrapperWithSize.vm.buttonSizeClass).toBe('btn-sm');
-
- done();
- wrapperWithSize.destroy();
- });
- });
- });
- });
-
- describe('methods', () => {
- describe('handlePrimaryActionClick', () => {
- it('emits `onActionClick` event on component with selectedAction object as param', () => {
- jest.spyOn(wrapper.vm, '$emit');
-
- wrapper.setData({ selectedAction: 0 });
- wrapper.vm.handlePrimaryActionClick();
-
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('onActionClick', mockActions[0]);
- });
- });
-
- describe('handleActionClick', () => {
- it('emits `onActionSelect` event on component with selectedAction index as param', () => {
- jest.spyOn(wrapper.vm, '$emit');
-
- wrapper.vm.handleActionClick(1);
-
- expect(wrapper.vm.$emit).toHaveBeenCalledWith('onActionSelect', 1);
- });
- });
- });
-
- describe('template', () => {
- it('renders default action button', () => {
- const defaultButton = wrapper.findAll('.btn').at(0);
-
- expect(defaultButton.text()).toBe(mockActions[0].title);
- });
-
- it('renders dropdown button', () => {
- const dropdownButton = wrapper.findAll('.dropdown-toggle').at(0);
-
- expect(dropdownButton.isVisible()).toBe(true);
- });
-
- it('renders dropdown actions', () => {
- const dropdownActions = wrapper.findAll('.dropdown-menu li button');
-
- Array(dropdownActions.length)
- .fill()
- .forEach((_, index) => {
- const actionContent = dropdownActions.at(index).find('.description');
-
- expect(actionContent.find('strong').text()).toBe(mockActions[index].title);
- expect(actionContent.find('p').text()).toBe(mockActions[index].description);
- });
- });
-
- it('renders divider between dropdown actions', () => {
- const dropdownDivider = wrapper.find('.dropdown-menu .divider');
-
- expect(dropdownDivider.isVisible()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index 59d05f68fdd..157faa90efa 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -1,6 +1,7 @@
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlDrawer } from '@gitlab/ui';
+import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
import App from '~/whats_new/components/app.vue';
const localVue = createLocalVue();
@@ -11,7 +12,8 @@ describe('App', () => {
let store;
let actions;
let state;
- let propsData = { features: '[ {"title":"Whats New Drawer"} ]' };
+ let propsData = { features: '[ {"title":"Whats New Drawer"} ]', storageKey: 'storage-key' };
+ let trackingSpy;
const buildWrapper = () => {
actions = {
@@ -36,11 +38,16 @@ describe('App', () => {
};
beforeEach(() => {
+ document.body.dataset.page = 'test-page';
+ document.body.dataset.namespaceId = 'namespace-840';
+
+ trackingSpy = mockTracking('_category_', null, jest.spyOn);
buildWrapper();
});
afterEach(() => {
wrapper.destroy();
+ unmockTracking();
});
const getDrawer = () => wrapper.find(GlDrawer);
@@ -50,7 +57,11 @@ describe('App', () => {
});
it('dispatches openDrawer when mounted', () => {
- expect(actions.openDrawer).toHaveBeenCalled();
+ expect(actions.openDrawer).toHaveBeenCalledWith(expect.any(Object), 'storage-key');
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_whats_new_drawer', {
+ label: 'namespace_id',
+ value: 'namespace-840',
+ });
});
it('dispatches closeDrawer when clicking close', () => {
@@ -71,9 +82,30 @@ describe('App', () => {
});
it('handles bad json argument gracefully', () => {
- propsData = { features: 'this is not json' };
+ propsData = { features: 'this is not json', storageKey: 'storage-key' };
buildWrapper();
expect(getDrawer().exists()).toBe(true);
});
+
+ it('sends an event when a feature item is clicked', () => {
+ propsData = {
+ features: '[ {"title":"Whats New Drawer", "url": "www.url.com"} ]',
+ storageKey: 'storage-key',
+ };
+ buildWrapper();
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+
+ const link = wrapper.find('[data-testid="whats-new-title-link"]');
+ triggerEvent(link.element);
+
+ expect(trackingSpy.mock.calls[2]).toMatchObject([
+ '_category_',
+ 'click_whats_new_item',
+ {
+ label: 'Whats New Drawer',
+ property: 'www.url.com',
+ },
+ ]);
+ });
});
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
index d95453c9175..6f550222074 100644
--- a/spec/frontend/whats_new/store/actions_spec.js
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -1,11 +1,16 @@
import testAction from 'helpers/vuex_action_helper';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import actions from '~/whats_new/store/actions';
import * as types from '~/whats_new/store/mutation_types';
describe('whats new actions', () => {
describe('openDrawer', () => {
+ useLocalStorageSpy();
+
it('should commit openDrawer', () => {
- testAction(actions.openDrawer, {}, {}, [{ type: types.OPEN_DRAWER }]);
+ testAction(actions.openDrawer, 'storage-key', {}, [{ type: types.OPEN_DRAWER }]);
+
+ expect(window.localStorage.setItem).toHaveBeenCalledWith('storage-key', 'false');
});
});
diff --git a/spec/frontend_integration/.eslintrc.yml b/spec/frontend_integration/.eslintrc.yml
index 26b6f935ffb..2460e218f59 100644
--- a/spec/frontend_integration/.eslintrc.yml
+++ b/spec/frontend_integration/.eslintrc.yml
@@ -4,3 +4,5 @@ settings:
import/resolver:
jest:
jestConfigFile: 'jest.config.integration.js'
+globals:
+ mockServer: false
diff --git a/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap b/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
index 6beb5eab6db..6c120898f01 100644
--- a/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
+++ b/spec/frontend_integration/ide/__snapshots__/ide_integration_spec.js.snap
@@ -20,6 +20,7 @@ exports[`WebIDE runs 1`] = `
>
<div
class="multi-file-commit-panel-inner"
+ data-testid="ide-side-bar-inner"
>
<div
class="multi-file-loading-container"
diff --git a/spec/frontend_integration/ide/ide_helper.js b/spec/frontend_integration/ide/ide_helper.js
new file mode 100644
index 00000000000..a43695fea8f
--- /dev/null
+++ b/spec/frontend_integration/ide/ide_helper.js
@@ -0,0 +1,102 @@
+import { findAllByText, fireEvent, getByLabelText, screen } from '@testing-library/dom';
+
+const isFileRowOpen = row => row.matches('.is-open');
+
+const getLeftSidebar = () => screen.getByTestId('left-sidebar');
+
+const clickOnLeftSidebarTab = name => {
+ const sidebar = getLeftSidebar();
+
+ const button = getByLabelText(sidebar, name);
+
+ button.click();
+};
+
+const findMonacoEditor = () =>
+ screen.findByLabelText(/Editor content;/).then(x => x.closest('.monaco-editor'));
+
+const findAndSetEditorValue = async value => {
+ const editor = await findMonacoEditor();
+ const uri = editor.getAttribute('data-uri');
+
+ window.monaco.editor.getModel(uri).setValue(value);
+};
+
+const findTreeBody = () => screen.findByTestId('ide-tree-body', {}, { timeout: 5000 });
+
+const findFileRowContainer = (row = null) =>
+ row ? Promise.resolve(row.parentElement) : findTreeBody();
+
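+// Finds the child row named `name` under `row` (or the tree root) at the given nesting depth, matched via the row's data-level attribute.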
+const findFileChild = async (row, name, index = 0) => {
+ const container = await findFileRowContainer(row);
+ const children = await findAllByText(container, name, { selector: '.file-row-name' });
+
+ return children.map(x => x.closest('.file-row')).find(x => x.dataset.level === index.toString());
+};
+
+const openFileRow = row => {
+ if (!row || isFileRowOpen(row)) {
+ return;
+ }
+
+ row.click();
+};
+
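+// Walks `path` one segment at a time, opening each directory row along the way, and returns the row for the final segment.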
+const findAndTraverseToPath = async (path, index = 0, row = null) => {
+ if (!path) {
+ return row;
+ }
+
+ const [name, ...restOfPath] = path.split('/');
+
+ openFileRow(row);
+
+ const child = await findFileChild(row, name, index);
+
+ return findAndTraverseToPath(restOfPath.join('/'), index + 1, child);
+};
+
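+// Hovers over a file row to reveal its dropdown and clicks the action with the given label.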
+const clickFileRowAction = (row, name) => {
+ fireEvent.mouseOver(row);
+
+ const dropdownButton = getByLabelText(row, 'Create new file or directory');
+ dropdownButton.click();
+
+ const dropdownAction = getByLabelText(dropdownButton.parentNode, name);
+ dropdownAction.click();
+};
+
+const findAndSetFileName = async value => {
+ const nameField = await screen.findByTestId('file-name-field');
+ fireEvent.input(nameField, { target: { value } });
+
+ const createButton = screen.getByText('Create file');
+ createButton.click();
+};
+
+export const createFile = async (path, content) => {
+ const parentPath = path
+ .split('/')
+ .slice(0, -1)
+ .join('/');
+
+ const parentRow = await findAndTraverseToPath(parentPath);
+ clickFileRowAction(parentRow, 'New file');
+
+ await findAndSetFileName(path);
+ await findAndSetEditorValue(content);
+};
+
+export const deleteFile = async path => {
+ const row = await findAndTraverseToPath(path);
+ clickFileRowAction(row, 'Delete');
+};
+
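+// Opens the Commit sidebar tab, starts a commit, keeps the current branch, and confirms.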
+export const commit = async () => {
+ clickOnLeftSidebarTab('Commit');
+ screen.getByTestId('begin-commit-button').click();
+
+ await screen.findByLabelText(/Commit to .+ branch/).then(x => x.click());
+
+ screen.getByText('Commit').click();
+};
diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js
index 91d89c26ec1..c4d0c4df8de 100644
--- a/spec/frontend_integration/ide/ide_integration_spec.js
+++ b/spec/frontend_integration/ide/ide_integration_spec.js
@@ -1,17 +1,10 @@
-/**
- * WARNING: WIP
- *
- * Please do not copy from this spec or use it as an example for anything.
- *
- * This is in place to iteratively set up the frontend integration testing environment
- * and will be improved upon in a later iteration.
- *
- * See https://gitlab.com/gitlab-org/gitlab/-/issues/208800 for more information.
- */
import { TEST_HOST } from 'helpers/test_constants';
+import { waitForText } from 'helpers/wait_for_text';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
+import { createCommitId } from 'test_helpers/factories/commit_id';
import { initIde } from '~/ide';
import extendStore from '~/ide/stores/extend';
+import * as ideHelper from './ide_helper';
const TEST_DATASET = {
emptyStateSvgPath: '/test/empty_state.svg',
@@ -59,4 +52,38 @@ describe('WebIDE', () => {
expect(root).toMatchSnapshot();
});
+
+ it('user commits changes', async () => {
+ createComponent();
+
+ await ideHelper.createFile('foo/bar/test.txt', 'Lorem ipsum dolar sit');
+ await ideHelper.deleteFile('foo/bar/.gitkeep');
+ await ideHelper.commit();
+
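+ // createCommitId is deterministic, so this matches the commit ID the mock server assigns to the first commit.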
+ const commitId = createCommitId(1);
+ const commitShortId = commitId.slice(0, 8);
+
+ await waitForText('All changes are committed');
+ await waitForText(commitShortId);
+
+ expect(mockServer.db.branches.findBy({ name: 'master' }).commit).toMatchObject({
+ short_id: commitShortId,
+ id: commitId,
+ message: 'Update foo/bar/test.txt\nDeleted foo/bar/.gitkeep',
+ __actions: [
+ {
+ action: 'create',
+ content: 'Lorem ipsum dolar sit\n',
+ encoding: 'text',
+ file_path: 'foo/bar/test.txt',
+ last_commit_id: '',
+ },
+ {
+ action: 'delete',
+ encoding: 'text',
+ file_path: 'foo/bar/.gitkeep',
+ },
+ ],
+ });
+ });
});
diff --git a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
index 343aeebf88e..43a21deed25 100644
--- a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
+++ b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
@@ -1,13 +1,12 @@
import { createMockServer } from '../mock_server';
beforeEach(() => {
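+ // Shut down any mock server left over from a previous spec before creating a fresh one.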
+ if (global.mockServer) {
+ global.mockServer.shutdown();
+ }
+
const server = createMockServer();
server.logging = false;
global.mockServer = server;
});
-
-afterEach(() => {
- global.mockServer.shutdown();
- global.mockServer = null;
-});
diff --git a/spec/graphql/features/feature_flag_spec.rb b/spec/graphql/features/feature_flag_spec.rb
index b484663d675..9ebc6e595a6 100644
--- a/spec/graphql/features/feature_flag_spec.rb
+++ b/spec/graphql/features/feature_flag_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe 'Graphql Field feature flags' do
let(:query_string) { '{ item { name } }' }
let(:result) { execute_query(query_type)['data'] }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
subject { result }
describe 'Feature flagged field' do
diff --git a/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb b/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb
index 76854be2daa..c5637d43382 100644
--- a/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb
+++ b/spec/graphql/resolvers/admin/analytics/instance_statistics/measurements_resolver_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsResolver do
include GraphqlHelpers
+ let_it_be(:admin_user) { create(:user, :admin) }
+ let(:current_user) { admin_user }
+
describe '#resolve' do
let_it_be(:user) { create(:user) }
- let_it_be(:admin_user) { create(:user, :admin) }
let_it_be(:project_measurement_new) { create(:instance_statistics_measurement, :project_count, recorded_at: 2.days.ago) }
let_it_be(:project_measurement_old) { create(:instance_statistics_measurement, :project_count, recorded_at: 10.days.ago) }
@@ -39,6 +41,37 @@ RSpec.describe Resolvers::Admin::Analytics::InstanceStatistics::MeasurementsReso
end
end
end
+
+ context 'when requesting pipeline counts by pipeline status' do
+ let_it_be(:pipelines_succeeded_measurement) { create(:instance_statistics_measurement, :pipelines_succeeded_count, recorded_at: 2.days.ago) }
+ let_it_be(:pipelines_skipped_measurement) { create(:instance_statistics_measurement, :pipelines_skipped_count, recorded_at: 2.days.ago) }
+
+ subject { resolve_measurements({ identifier: identifier }, { current_user: current_user }) }
+
+ context 'filter for pipelines_succeeded' do
+ let(:identifier) { 'pipelines_succeeded' }
+
+ it { is_expected.to eq([pipelines_succeeded_measurement]) }
+ end
+
+ context 'filter for pipelines_skipped' do
+ let(:identifier) { 'pipelines_skipped' }
+
+ it { is_expected.to eq([pipelines_skipped_measurement]) }
+ end
+
+ context 'filter for pipelines_failed' do
+ let(:identifier) { 'pipelines_failed' }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'filter for pipelines_canceled' do
+ let(:identifier) { 'pipelines_canceled' }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
def resolve_measurements(args = {}, context = {})
diff --git a/spec/graphql/resolvers/board_resolver_spec.rb b/spec/graphql/resolvers/board_resolver_spec.rb
new file mode 100644
index 00000000000..c70c696005f
--- /dev/null
+++ b/spec/graphql/resolvers/board_resolver_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::BoardResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:dummy_gid) { 'gid://gitlab/Board/1' }
+
+ shared_examples_for 'group and project boards resolver' do
+ it 'does not create a default board' do
+ expect(resolve_board(id: dummy_gid)).to eq nil
+ end
+
+ it 'calls Boards::ListService' do
+ expect_next_instance_of(Boards::ListService) do |service|
+ expect(service).to receive(:execute).and_return([])
+ end
+
+ resolve_board(id: dummy_gid)
+ end
+
+ it 'requires an ID' do
+ expect do
+ resolve(described_class, obj: board_parent, args: {}, ctx: { current_user: user })
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+
+ context 'when querying for a single board' do
+ let(:board1) { create(:board, name: 'One', resource_parent: board_parent) }
+
+ it 'returns specified board' do
+ expect(resolve_board(id: global_id_of(board1))).to eq board1
+ end
+
+ it 'returns nil if board not found' do
+ outside_parent = create(board_parent.class.underscore.to_sym) # rubocop:disable Rails/SaveBang
+ outside_board = create(:board, name: 'outside board', resource_parent: outside_parent)
+
+ expect(resolve_board(id: global_id_of(outside_board))).to eq nil
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when there is no parent' do
+ let(:board_parent) { nil }
+
+ it 'returns nil if parent is nil' do
+ expect(resolve_board(id: dummy_gid)).to eq(nil)
+ end
+ end
+
+ context 'when project boards' do
+ let(:board_parent) { create(:project, :public, creator_id: user.id, namespace: user.namespace) }
+
+ it_behaves_like 'group and project boards resolver'
+ end
+
+ context 'when group boards' do
+ let(:board_parent) { create(:group) }
+
+ it_behaves_like 'group and project boards resolver'
+ end
+ end
+
+ def resolve_board(id:)
+ resolve(described_class, obj: board_parent, args: { id: id }, ctx: { current_user: user })
+ end
+end
diff --git a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
index f13823085b8..ebea9e5522b 100644
--- a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
+++ b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
@@ -117,20 +117,6 @@ RSpec.describe LooksAhead do
query.result
end
- context 'the feature flag is off' do
- before do
- stub_feature_flags(described_class::FEATURE_FLAG => false)
- end
-
- it_behaves_like 'a working query on the test schema'
-
- it 'does not preload labels on issues' do
- expect(the_user.issues).not_to receive(:preload).with(:labels)
-
- query.result
- end
- end
-
it 'issues fewer queries than the naive approach' do
the_user.reload # ensure no attributes are loaded before we begin
naive = <<-GQL
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index db5d009f0e7..3a6507f906c 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -46,6 +46,13 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(assignee_username: assignee.username)).to contain_exactly(issue2)
end
+ it 'filters by two assignees' do
+ assignee2 = create(:user)
+ issue2.update!(assignees: [assignee, assignee2])
+
+ expect(resolve_issues(assignee_id: [assignee.id, assignee2.id])).to contain_exactly(issue2)
+ end
+
it 'filters by assignee_id' do
expect(resolve_issues(assignee_id: assignee.id)).to contain_exactly(issue2)
end
@@ -58,6 +65,10 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(assignee_id: IssuableFinder::Params::FILTER_NONE)).to contain_exactly(issue1)
end
+ it 'filters by author' do
+ expect(resolve_issues(author_username: issue1.author.username)).to contain_exactly(issue1, issue2)
+ end
+
it 'filters by labels' do
expect(resolve_issues(label_name: [label1.title])).to contain_exactly(issue1, issue2)
expect(resolve_issues(label_name: [label1.title, label2.title])).to contain_exactly(issue2)
@@ -219,6 +230,21 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(sort: :milestone_due_desc).items).to eq([milestone_issue3, milestone_issue2, milestone_issue1])
end
end
+
+ context 'when sorting by severity' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue_high_severity) { create_issue_with_severity(project, severity: :high) }
+ let_it_be(:issue_low_severity) { create_issue_with_severity(project, severity: :low) }
+ let_it_be(:issue_no_severity) { create(:incident, project: project) }
+
+ it 'sorts issues ascending' do
+ expect(resolve_issues(sort: :severity_asc)).to eq([issue_no_severity, issue_low_severity, issue_high_severity])
+ end
+
+ it 'sorts issues descending' do
+ expect(resolve_issues(sort: :severity_desc)).to eq([issue_high_severity, issue_low_severity, issue_no_severity])
+ end
+ end
end
it 'returns issues user can see' do
@@ -304,6 +330,13 @@ RSpec.describe Resolvers::IssuesResolver do
expect(field.to_graphql.complexity.call({}, { labelName: 'foo' }, 1)).to eq 8
end
+ def create_issue_with_severity(project, severity:)
+ issue = create(:incident, project: project)
+ create(:issuable_severity, issue: issue, severity: severity)
+
+ issue
+ end
+
def resolve_issues(args = {}, context = { current_user: current_user })
resolve(described_class, obj: project, args: args, ctx: context)
end
diff --git a/spec/graphql/resolvers/projects_resolver_spec.rb b/spec/graphql/resolvers/projects_resolver_spec.rb
index d22ffeed740..07069bb1b06 100644
--- a/spec/graphql/resolvers/projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects_resolver_spec.rb
@@ -8,10 +8,15 @@ RSpec.describe Resolvers::ProjectsResolver do
describe '#resolve' do
subject { resolve(described_class, obj: nil, args: filters, ctx: { current_user: current_user }) }
+ let_it_be(:group) { create(:group, name: 'public-group') }
+ let_it_be(:private_group) { create(:group, name: 'private-group') }
let_it_be(:project) { create(:project, :public) }
let_it_be(:other_project) { create(:project, :public) }
+ let_it_be(:group_project) { create(:project, :public, group: group) }
let_it_be(:private_project) { create(:project, :private) }
let_it_be(:other_private_project) { create(:project, :private) }
+ let_it_be(:other_private_project) { create(:project, :private) }
+ let_it_be(:private_group_project) { create(:project, :private, group: private_group) }
let_it_be(:user) { create(:user) }
@@ -20,6 +25,7 @@ RSpec.describe Resolvers::ProjectsResolver do
before_all do
project.add_developer(user)
private_project.add_developer(user)
+ private_group.add_developer(user)
end
context 'when user is not logged in' do
@@ -27,7 +33,7 @@ RSpec.describe Resolvers::ProjectsResolver do
context 'when no filters are applied' do
it 'returns all public projects' do
- is_expected.to contain_exactly(project, other_project)
+ is_expected.to contain_exactly(project, other_project, group_project)
end
context 'when search filter is provided' do
@@ -45,6 +51,22 @@ RSpec.describe Resolvers::ProjectsResolver do
is_expected.to be_empty
end
end
+
+ context 'when searchNamespaces filter is provided' do
+ let(:filters) { { search: 'group', search_namespaces: true } }
+
+ it 'returns projects in a matching namespace' do
+ is_expected.to contain_exactly(group_project)
+ end
+ end
+
+ context 'when searchNamespaces filter false' do
+ let(:filters) { { search: 'group', search_namespaces: false } }
+
+ it 'ignores namespace matches' do
+ is_expected.to be_empty
+ end
+ end
end
end
@@ -53,7 +75,7 @@ RSpec.describe Resolvers::ProjectsResolver do
context 'when no filters are applied' do
it 'returns all visible projects for the user' do
- is_expected.to contain_exactly(project, other_project, private_project)
+ is_expected.to contain_exactly(project, other_project, group_project, private_project, private_group_project)
end
context 'when search filter is provided' do
@@ -68,7 +90,23 @@ RSpec.describe Resolvers::ProjectsResolver do
let(:filters) { { membership: true } }
it 'returns projects that user is member of' do
- is_expected.to contain_exactly(project, private_project)
+ is_expected.to contain_exactly(project, private_project, private_group_project)
+ end
+ end
+
+ context 'when searchNamespaces filter is provided' do
+ let(:filters) { { search: 'group', search_namespaces: true } }
+
+ it 'returns projects from matching group' do
+ is_expected.to contain_exactly(group_project, private_group_project)
+ end
+ end
+
+ context 'when searchNamespaces filter false' do
+ let(:filters) { { search: 'group', search_namespaces: false } }
+
+ it 'ignores namespace matches' do
+ is_expected.to be_empty
end
end
diff --git a/spec/graphql/resolvers/snippets/blobs_resolver_spec.rb b/spec/graphql/resolvers/snippets/blobs_resolver_spec.rb
new file mode 100644
index 00000000000..fdbd87c32be
--- /dev/null
+++ b/spec/graphql/resolvers/snippets/blobs_resolver_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Snippets::BlobsResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:snippet) { create(:personal_snippet, :private, :repository, author: current_user) }
+
+ context 'when user is not authorized' do
+ let(:other_user) { create(:user) }
+
+ it 'raises an error' do
+ expect do
+ resolve_blobs(snippet, user: other_user)
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when using no filter' do
+ it 'returns all snippet blobs' do
+ expect(resolve_blobs(snippet).map(&:path)).to contain_exactly(*snippet.list_files)
+ end
+ end
+
+ context 'when using filters' do
+ context 'when paths is a single string' do
+ it 'returns an array of files' do
+ path = 'CHANGELOG'
+
+ expect(resolve_blobs(snippet, args: { paths: path }).first.path).to eq(path)
+ end
+ end
+
+ context 'when paths is an array of string' do
+ it 'returns an array of files' do
+ paths = ['CHANGELOG', 'README.md']
+
+ expect(resolve_blobs(snippet, args: { paths: paths }).map(&:path)).to contain_exactly(*paths)
+ end
+ end
+ end
+ end
+
+ def resolve_blobs(snippet, user: current_user, args: {})
+ resolve(described_class, args: args, ctx: { current_user: user }, obj: snippet)
+ end
+end
diff --git a/spec/graphql/resolvers/terraform/states_resolver_spec.rb b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
new file mode 100644
index 00000000000..64b515528cd
--- /dev/null
+++ b/spec/graphql/resolvers/terraform/states_resolver_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Terraform::StatesResolver do
+ include GraphqlHelpers
+
+ it { expect(described_class.type).to eq(Types::Terraform::StateType) }
+ it { expect(described_class.null).to be_truthy }
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:production_state) { create(:terraform_state, project: project) }
+ let_it_be(:staging_state) { create(:terraform_state, project: project) }
+ let_it_be(:other_state) { create(:terraform_state) }
+
+ let(:ctx) { Hash(current_user: user) }
+ let(:user) { create(:user, developer_projects: [project]) }
+
+ subject { resolve(described_class, obj: project, ctx: ctx) }
+
+ it 'returns states associated with the agent' do
+ expect(subject).to contain_exactly(production_state, staging_state)
+ end
+
+ context 'user does not have permission' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index bcfbd7f2480..d61ea6aa6e9 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -126,6 +126,10 @@ RSpec.describe Types::BaseField do
let(:field) { described_class.new(name: 'test', type: GraphQL::STRING_TYPE, feature_flag: flag, null: false) }
let(:context) { {} }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
it 'returns false if the feature is not enabled' do
stub_feature_flags(flag => false)
diff --git a/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb b/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
new file mode 100644
index 00000000000..f536d91aeda
--- /dev/null
+++ b/spec/graphql/types/design_management/design_collection_copy_state_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['DesignCollectionCopyState'] do
+ it { expect(described_class.graphql_name).to eq('DesignCollectionCopyState') }
+
+ it 'exposes the correct copy states' do
+ expect(described_class.values.keys).to match_array(%w(READY IN_PROGRESS ERROR))
+ end
+end
diff --git a/spec/graphql/types/design_management/design_collection_type_spec.rb b/spec/graphql/types/design_management/design_collection_type_spec.rb
index 6b1d3a87c2d..83208705249 100644
--- a/spec/graphql/types/design_management/design_collection_type_spec.rb
+++ b/spec/graphql/types/design_management/design_collection_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['DesignCollection'] do
it { expect(described_class).to require_graphql_authorizations(:read_design) }
it 'has the expected fields' do
- expected_fields = %i[project issue designs versions version designAtVersion design]
+ expected_fields = %i[project issue designs versions version designAtVersion design copyState]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index 2a7b26f66b0..7589b0e285e 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -99,8 +99,6 @@ RSpec.describe Types::GlobalIDType do
end
describe 'compatibility' do
- # Simplified schema to test compatibility
-
def query(doc, vars)
GraphQL::Query.new(schema, document: doc, context: {}, variables: vars)
end
@@ -112,6 +110,7 @@ RSpec.describe Types::GlobalIDType do
all_types = [::GraphQL::ID_TYPE, ::Types::GlobalIDType, ::Types::GlobalIDType[::Project]]
shared_examples 'a working query' do
+ # Simplified schema to test compatibility
let!(:schema) do
# capture values so they can be closed over
arg_type = argument_type
@@ -135,10 +134,21 @@ RSpec.describe Types::GlobalIDType do
argument :id, arg_type, required: true
end
+ # This is needed so that all types are always registered as input types
+ field :echo, String, null: true do
+ argument :id, ::GraphQL::ID_TYPE, required: false
+ argument :gid, ::Types::GlobalIDType, required: false
+ argument :pid, ::Types::GlobalIDType[::Project], required: false
+ end
+
def project_by_id(id:)
gid = ::Types::GlobalIDType[::Project].coerce_isolated_input(id)
gid.model_class.find(gid.model_id)
end
+
+ def echo(id: nil, gid: nil, pid: nil)
+ "id: #{id}, gid: #{gid}, pid: #{pid}"
+ end
end)
end
end
@@ -152,7 +162,7 @@ RSpec.describe Types::GlobalIDType do
end
end
- context 'when the argument is declared as ID' do
+ context 'when the client declares the argument as ID, the actual argument can be any type' do
let(:document) do
<<-GRAPHQL
query($projectId: ID!){
@@ -163,16 +173,16 @@ RSpec.describe Types::GlobalIDType do
GRAPHQL
end
- let(:argument_type) { ::GraphQL::ID_TYPE }
-
- where(:result_type) { all_types }
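+ # Builds every (result_type, argument_type) combination from all_types.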
+ where(:result_type, :argument_type) do
+ all_types.flat_map { |arg_type| all_types.zip([arg_type].cycle) }
+ end
with_them do
it_behaves_like 'a working query'
end
end
- context 'when the argument is declared as GlobalID' do
+ context 'when the client passes the argument as GlobalID' do
let(:document) do
<<-GRAPHQL
query($projectId: GlobalID!) {
@@ -192,7 +202,7 @@ RSpec.describe Types::GlobalIDType do
end
end
- context 'when the argument is declared as ProjectID' do
+ context 'when the client passes the argument as ProjectID' do
let(:document) do
<<-GRAPHQL
query($projectId: ProjectID!) {
diff --git a/spec/graphql/types/issue_sort_enum_spec.rb b/spec/graphql/types/issue_sort_enum_spec.rb
index 9313d3aee84..4433709d193 100644
--- a/spec/graphql/types/issue_sort_enum_spec.rb
+++ b/spec/graphql/types/issue_sort_enum_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['IssueSort'] do
it 'exposes all the existing issue sort values' do
expect(described_class.values.keys).to include(
- *%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC]
+ *%w[DUE_DATE_ASC DUE_DATE_DESC RELATIVE_POSITION_ASC SEVERITY_ASC SEVERITY_DESC]
)
end
end
diff --git a/spec/graphql/types/package_type_enum_spec.rb b/spec/graphql/types/package_type_enum_spec.rb
index 80a20a68bc2..638f8ccbaee 100644
--- a/spec/graphql/types/package_type_enum_spec.rb
+++ b/spec/graphql/types/package_type_enum_spec.rb
@@ -4,6 +4,6 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['PackageTypeEnum'] do
it 'exposes all package types' do
- expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC])
+ expect(described_class.values.keys).to contain_exactly(*%w[MAVEN NPM CONAN NUGET PYPI COMPOSER GENERIC GOLANG])
end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 44a89bfa35e..8aa9e1138cc 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe GitlabSchema.types['Project'] do
environment boards jira_import_status jira_imports services releases release
alert_management_alerts alert_management_alert alert_management_alert_status_counts
container_expiration_policy service_desk_enabled service_desk_address
- issue_status_counts
+ issue_status_counts terraform_states
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -154,5 +154,12 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::ContainerExpirationPolicyType) }
end
+ describe 'terraform states field' do
+ subject { described_class.fields['terraformStates'] }
+
+ it { is_expected.to have_graphql_type(Types::Terraform::StateType.connection_type) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Terraform::StatesResolver) }
+ end
+
it_behaves_like 'a GraphQL type with labels'
end
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index 86af69f1294..2ea215450cb 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -16,6 +16,15 @@ RSpec.describe GitlabSchema.types['Snippet'] do
expect(described_class).to have_graphql_fields(*expected_fields)
end
+ describe 'blobs field' do
+ subject { described_class.fields['blobs'] }
+
+ it 'has the correct type and resolver' do
+ is_expected.to have_graphql_type(Types::Snippets::BlobType.connection_type)
+ is_expected.to have_graphql_resolver(Resolvers::Snippets::BlobsResolver)
+ end
+ end
+
context 'when restricted visibility level is set to public' do
let_it_be(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
@@ -142,9 +151,30 @@ RSpec.describe GitlabSchema.types['Snippet'] do
describe '#blobs' do
let_it_be(:snippet) { create(:personal_snippet, :public, author: user) }
- let(:query_blobs) { subject.dig('data', 'snippets', 'edges')[0]['node']['blobs'] }
+ let(:query_blobs) { subject.dig('data', 'snippets', 'edges')[0].dig('node', 'blobs', 'edges') }
+ let(:paths) { [] }
+ let(:query) do
+ %(
+ {
+ snippets {
+ edges {
+ node {
+ blobs(paths: #{paths}) {
+ edges {
+ node {
+ name
+ path
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
- subject { GitlabSchema.execute(snippet_query_for(field: 'blobs'), context: { current_user: user }).as_json }
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
shared_examples 'an array' do
it 'returns an array of snippet blobs' do
@@ -158,8 +188,8 @@ RSpec.describe GitlabSchema.types['Snippet'] do
it_behaves_like 'an array'
it 'contains the first blob from the snippet' do
- expect(query_blobs.first['name']).to eq blob.name
- expect(query_blobs.first['path']).to eq blob.path
+ expect(query_blobs.first['node']['name']).to eq blob.name
+ expect(query_blobs.first['node']['path']).to eq blob.path
end
end
@@ -170,10 +200,22 @@ RSpec.describe GitlabSchema.types['Snippet'] do
it_behaves_like 'an array'
it 'contains all the blobs from the repository' do
- resulting_blobs_names = query_blobs.map { |b| b['name'] }
+ resulting_blobs_names = query_blobs.map { |b| b['node']['name'] }
expect(resulting_blobs_names).to match_array(blobs.map(&:name))
end
+
+ context 'when specific path is set' do
+ let(:paths) { ['CHANGELOG'] }
+
+ it_behaves_like 'an array'
+
+ it 'returns specific files' do
+ resulting_blobs_names = query_blobs.map { |b| b['node']['name'] }
+
+ expect(resulting_blobs_names).to match(paths)
+ end
+ end
end
end
diff --git a/spec/graphql/types/terraform/state_type_spec.rb b/spec/graphql/types/terraform/state_type_spec.rb
new file mode 100644
index 00000000000..51508208046
--- /dev/null
+++ b/spec/graphql/types/terraform/state_type_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['TerraformState'] do
+ it { expect(described_class.graphql_name).to eq('TerraformState') }
+ it { expect(described_class).to require_graphql_authorizations(:read_terraform_state) }
+
+ describe 'fields' do
+ let(:fields) { %i[id name locked_by_user locked_at created_at updated_at] }
+
+ it { expect(described_class).to have_graphql_fields(fields) }
+
+ it { expect(described_class.fields['id'].type).to be_non_null }
+ it { expect(described_class.fields['name'].type).to be_non_null }
+ it { expect(described_class.fields['lockedByUser'].type).not_to be_non_null }
+ it { expect(described_class.fields['lockedAt'].type).not_to be_non_null }
+ it { expect(described_class.fields['createdAt'].type).to be_non_null }
+ it { expect(described_class.fields['updatedAt'].type).to be_non_null }
+ end
+end
diff --git a/spec/haml_lint/linter/documentation_links_spec.rb b/spec/haml_lint/linter/documentation_links_spec.rb
index 68de8317b82..5de455b6e8c 100644
--- a/spec/haml_lint/linter/documentation_links_spec.rb
+++ b/spec/haml_lint/linter/documentation_links_spec.rb
@@ -8,75 +8,85 @@ require Rails.root.join('haml_lint/linter/documentation_links')
RSpec.describe HamlLint::Linter::DocumentationLinks do
include_context 'linter'
- context 'when link_to points to the existing file path' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md')" }
+ shared_examples 'link validation rules' do |link_pattern|
+ context 'when link_to points to the existing file path' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when link_to points to the existing file with valid anchor' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md', anchor: 'overview'), target: '_blank'" }
+ context 'when link_to points to the existing file with valid anchor' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', anchor: 'overview'), target: '_blank'" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when link_to points to the existing file path without .md extension' do
- let(:haml) { "= link_to 'Description', help_page_path('README')" }
+ context 'when link_to points to the existing file path without .md extension' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when anchor is not correct' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md', anchor: 'wrong')" }
+ context 'when anchor is not correct' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', anchor: 'wrong')" }
- it { is_expected.to report_lint }
+ it { is_expected.to report_lint }
- context 'when help_page_path has multiple options' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md', key: :value, anchor: 'wrong')" }
+ context "when #{link_pattern} has multiple options" do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md', key: :value, anchor: 'wrong')" }
+
+ it { is_expected.to report_lint }
+ end
+ end
+
+ context 'when file path is wrong' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('wrong.md'), target: '_blank'" }
it { is_expected.to report_lint }
end
- end
- context 'when file path is wrong' do
- let(:haml) { "= link_to 'Description', help_page_path('wrong.md'), target: '_blank'" }
+ context 'when link with wrong file path is assigned to a variable' do
+ let(:haml) { "- my_link = link_to 'Description', #{link_pattern}('wrong.md')" }
- it { is_expected.to report_lint }
- end
+ it { is_expected.to report_lint }
+ end
- context 'when link with wrong file path is assigned to a variable' do
- let(:haml) { "- my_link = link_to 'Description', help_page_path('wrong.md')" }
+ context 'when it is a broken code' do
+ let(:haml) { "= I am broken! ]]]]" }
- it { is_expected.to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when it is a broken code' do
- let(:haml) { "= I am broken! ]]]]" }
+ context 'when anchor belongs to a different element' do
+ let(:haml) { "= link_to 'Description', #{link_pattern}('README.md'), target: (anchor: 'blank')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when anchor belongs to a different element' do
- let(:haml) { "= link_to 'Description', help_page_path('README.md'), target: (anchor: 'blank')" }
+ context "when a simple #{link_pattern}" do
+ let(:haml) { "- url = #{link_pattern}('wrong.md')" }
- it { is_expected.not_to report_lint }
- end
+ it { is_expected.to report_lint }
+ end
- context 'when a simple help_page_path' do
- let(:haml) { "- url = help_page_path('wrong.md')" }
+ context 'when link is not a string' do
+ let(:haml) { "- url = #{link_pattern}(help_url)" }
- it { is_expected.to report_lint }
- end
+ it { is_expected.not_to report_lint }
+ end
- context 'when link is not a string' do
- let(:haml) { "- url = help_page_path(help_url)" }
+ context 'when link is a part of the tag' do
+ let(:haml) { ".data-form{ data: { url: #{link_pattern}('wrong.md') } }" }
- it { is_expected.not_to report_lint }
+ it { is_expected.to report_lint }
+ end
end
- context 'when link is a part of the tag' do
- let(:haml) { ".data-form{ data: { url: help_page_path('wrong.md') } }" }
+ context 'help_page_path' do
+ it_behaves_like 'link validation rules', 'help_page_path'
+ end
- it { is_expected.to report_lint }
+ context 'help_page_url' do
+ it_behaves_like 'link validation rules', 'help_page_url'
end
end
diff --git a/spec/helpers/analytics/unique_visits_helper_spec.rb b/spec/helpers/analytics/unique_visits_helper_spec.rb
index ff9769078c4..ff363e81ac7 100644
--- a/spec/helpers/analytics/unique_visits_helper_spec.rb
+++ b/spec/helpers/analytics/unique_visits_helper_spec.rb
@@ -22,15 +22,6 @@ RSpec.describe Analytics::UniqueVisitsHelper do
helper.track_visit(target_id)
end
- it 'does not track visits if usage ping is disabled' do
- sign_in(current_user)
- expect(Gitlab::CurrentSettings).to receive(:usage_ping_enabled?).and_return(false)
-
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
-
- helper.track_visit(target_id)
- end
-
it 'does not track visit if user is not logged in' do
expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index a805b96a8cc..6bdd6fb6813 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -43,6 +43,7 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:current_user) { user }
allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, board).and_return(true)
+ allow(helper).to receive(:can?).with(user, :admin_issue, board).and_return(true)
end
it 'returns a board_lists_path as lists_endpoint' do
@@ -52,6 +53,10 @@ RSpec.describe BoardsHelper do
it 'returns board type as parent' do
expect(helper.board_data[:parent]).to eq('project')
end
+
+ it 'returns can_update for user permissions on the board' do
+ expect(helper.board_data[:can_update]).to eq('true')
+ end
end
describe '#current_board_json' do
diff --git a/spec/helpers/clusters_helper_spec.rb b/spec/helpers/clusters_helper_spec.rb
index 6164f3b5e8d..6b08b6515cf 100644
--- a/spec/helpers/clusters_helper_spec.rb
+++ b/spec/helpers/clusters_helper_spec.rb
@@ -59,6 +59,24 @@ RSpec.describe ClustersHelper do
end
end
+ describe '#js_cluster_agents_list_data' do
+ let_it_be(:project) { build(:project, :repository) }
+
+ subject { helper.js_cluster_agents_list_data(project) }
+
+ it 'displays project default branch' do
+ expect(subject[:default_branch_name]).to eq(project.default_branch)
+ end
+
+ it 'displays image path' do
+ expect(subject[:empty_state_image]).to match(%r(/illustrations/logos/clusters_empty|svg))
+ end
+
+ it 'displays project path' do
+ expect(subject[:project_path]).to eq(project.full_path)
+ end
+ end
+
describe '#js_clusters_list_data' do
subject { helper.js_clusters_list_data('/path') }
@@ -89,32 +107,6 @@ RSpec.describe ClustersHelper do
end
end
- describe '#provider_icon' do
- it 'will return GCP logo with gcp argument' do
- logo = helper.provider_icon('gcp')
-
- expect(logo).to match(%r(img alt="Google GKE" data-src="|/illustrations/logos/google_gke|svg))
- end
-
- it 'will return AWS logo with aws argument' do
- logo = helper.provider_icon('aws')
-
- expect(logo).to match(%r(img alt="Amazon EKS" data-src="|/illustrations/logos/amazon_eks|svg))
- end
-
- it 'will return default logo with unknown provider' do
- logo = helper.provider_icon('unknown')
-
- expect(logo).to match(%r(img alt="Kubernetes Cluster" data-src="|/illustrations/logos/kubernetes|svg))
- end
-
- it 'will return default logo when provider is empty' do
- logo = helper.provider_icon
-
- expect(logo).to match(%r(img alt="Kubernetes Cluster" data-src="|/illustrations/logos/kubernetes|svg))
- end
- end
-
describe '#cluster_type_label' do
subject { helper.cluster_type_label(cluster_type) }
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 96ac4015c77..ef8b342a3f6 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -361,4 +361,116 @@ RSpec.describe EmailsHelper do
end
end
end
+
+ describe '#change_reviewer_notification_text' do
+ let(:mary) { build(:user, name: 'Mary') }
+ let(:john) { build(:user, name: 'John') }
+ let(:ted) { build(:user, name: 'Ted') }
+
+ context 'to new reviewers only' do
+ let(:previous_reviewers) { [] }
+ let(:new_reviewers) { [john] }
+
+ context 'with no html tag' do
+ let(:expected_output) do
+ 'Reviewer changed to John'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+
+ context 'with <strong> tag' do
+ let(:expected_output) do
+ 'Reviewer changed to <strong>John</strong>'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+ end
+
+ context 'from previous reviewers to new reviewers' do
+ let(:previous_reviewers) { [john, mary] }
+ let(:new_reviewers) { [ted] }
+
+ context 'with no html tag' do
+ let(:expected_output) do
+ 'Reviewer changed from John and Mary to Ted'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+
+ context 'with <strong> tag' do
+ let(:expected_output) do
+ 'Reviewer changed from <strong>John and Mary</strong> to <strong>Ted</strong>'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+ end
+
+ context 'from previous reviewers to no reviewers' do
+ let(:previous_reviewers) { [john, mary] }
+ let(:new_reviewers) { [] }
+
+ context 'with no html tag' do
+ let(:expected_output) do
+ 'Reviewer changed from John and Mary to Unassigned'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+
+ context 'with <strong> tag' do
+ let(:expected_output) do
+ 'Reviewer changed from <strong>John and Mary</strong> to <strong>Unassigned</strong>'
+ end
+
+ it 'returns the expected output' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+ end
+
+ context "with a <script> tag in user's name" do
+ let(:previous_reviewers) { [] }
+ let(:new_reviewers) { [fishy_user] }
+ let(:fishy_user) { build(:user, name: "<script>alert('hi')</script>") }
+
+ let(:expected_output) do
+ 'Reviewer changed to <strong>&lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;</strong>'
+ end
+
+ it 'escapes the html tag' do
+ expect(change_reviewer_notification_text(new_reviewers, previous_reviewers, :strong)).to eq(expected_output)
+ end
+ end
+
+ context "with url in user's name" do
+ subject(:email_helper) { Object.new.extend(described_class) }
+
+ let(:previous_reviewers) { [] }
+ let(:new_reviewers) { [fishy_user] }
+ let(:fishy_user) { build(:user, name: "example.com") }
+
+ let(:expected_output) do
+ 'Reviewer changed to example_com'
+ end
+
+ it "sanitizes user's name" do
+ expect(email_helper).to receive(:sanitize_name).and_call_original
+ expect(email_helper.change_reviewer_notification_text(new_reviewers, previous_reviewers)).to eq(expected_output)
+ end
+ end
+ end
end
diff --git a/spec/helpers/feature_flags_helper_spec.rb b/spec/helpers/feature_flags_helper_spec.rb
new file mode 100644
index 00000000000..9a080736595
--- /dev/null
+++ b/spec/helpers/feature_flags_helper_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagsHelper do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#unleash_api_url' do
+ subject { helper.unleash_api_url(project) }
+
+ it { is_expected.to end_with("/api/v4/feature_flags/unleash/#{project.id}") }
+ end
+
+ describe '#unleash_api_instance_id' do
+ subject { helper.unleash_api_instance_id(project) }
+
+ it { is_expected.not_to be_empty }
+ end
+end
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index 1ad7c7bb9ff..0088f739879 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -187,7 +187,7 @@ RSpec.describe GitlabRoutingHelper do
let(:ref) { 'test-ref' }
let(:args) { {} }
- subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, args) }
+ subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, **args) }
it_behaves_like 'snippet blob raw path'
@@ -222,7 +222,7 @@ RSpec.describe GitlabRoutingHelper do
let(:ref) { 'snippet-test-ref' }
let(:args) { {} }
- subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, args) }
+ subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, **args) }
it_behaves_like 'snippet blob raw url'
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index a25bf1c4157..abf621aee10 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -5,6 +5,8 @@ require "spec_helper"
RSpec.describe Groups::GroupMembersHelper do
include MembersPresentation
+ let_it_be(:current_user) { create(:user) }
+
describe '.group_member_select_options' do
let(:group) { create(:group) }
@@ -20,6 +22,10 @@ RSpec.describe Groups::GroupMembersHelper do
describe '#linked_groups_data_json' do
include_context 'group_group_link'
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
+
it 'matches json schema' do
json = helper.linked_groups_data_json(shared_group.shared_with_group_links)
@@ -28,7 +34,6 @@ RSpec.describe Groups::GroupMembersHelper do
end
describe '#members_data_json' do
- let(:current_user) { create(:user) }
let(:group) { create(:group) }
before do
@@ -48,6 +53,14 @@ RSpec.describe Groups::GroupMembersHelper do
let(:group_member) { create(:group_member, group: group, created_by: current_user) }
it_behaves_like 'group_members.json'
+
+ context 'with user status set' do
+ let(:user) { create(:user) }
+ let!(:status) { create(:user_status, user: user) }
+ let(:group_member) { create(:group_member, group: group, user: user, created_by: current_user) }
+
+ it_behaves_like 'group_members.json'
+ end
end
context 'for an invited group member' do
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 89a2a92ea57..821440db551 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -306,6 +306,38 @@ RSpec.describe IssuablesHelper do
end
end
+ describe '#reviewer_sidebar_data' do
+ let(:user) { create(:user) }
+
+ subject { helper.reviewer_sidebar_data(user, merge_request: merge_request) }
+
+ context 'without merge_request' do
+ let(:merge_request) { nil }
+
+ it 'returns hash of reviewer data' do
+ is_expected.to eql({
+ avatar_url: user.avatar_url,
+ name: user.name,
+ username: user.username
+ })
+ end
+ end
+
+ context 'with merge_request' do
+ let(:merge_request) { build(:merge_request) }
+
+ where(can_merge: [true, false])
+
+ with_them do
+ before do
+ allow(merge_request).to receive(:can_be_merged_by?).and_return(can_merge)
+ end
+
+ it { is_expected.to include({ can_merge: can_merge }) }
+ end
+ end
+ end
+
describe '#issuable_squash_option?' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 3f84eeb12c2..1ed61bd3144 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -233,4 +233,25 @@ RSpec.describe IssuesHelper do
expect(helper.show_moved_service_desk_issue_warning?(new_issue)).to be(false)
end
end
+
+ describe '#use_startup_call?' do
+ it "returns false when a query param is present" do
+ allow(controller.request).to receive(:query_parameters).and_return({ foo: 'bar' })
+
+ expect(helper.use_startup_call?).to eq(false)
+ end
+
+ it "returns false when user has stored sort preference" do
+ controller.instance_variable_set(:@sort, 'updated_asc')
+
+ expect(helper.use_startup_call?).to eq(false)
+ end
+
+ it 'returns true when request.query_parameters is empty with default sorting preference' do
+ controller.instance_variable_set(:@sort, 'created_date')
+ allow(controller.request).to receive(:query_parameters).and_return({})
+
+ expect(helper.use_startup_call?).to eq(true)
+ end
+ end
end
diff --git a/spec/helpers/projects/incidents_helper_spec.rb b/spec/helpers/projects/incidents_helper_spec.rb
index 0affa67a902..68a5ce4eb91 100644
--- a/spec/helpers/projects/incidents_helper_spec.rb
+++ b/spec/helpers/projects/incidents_helper_spec.rb
@@ -9,9 +9,16 @@ RSpec.describe Projects::IncidentsHelper do
let(:project_path) { project.full_path }
let(:new_issue_path) { new_project_issue_path(project) }
let(:issue_path) { project_issues_path(project) }
+ let(:params) do
+ {
+ search: 'search text',
+ author_username: 'root',
+ assignee_username: 'max.power'
+ }
+ end
describe '#incidents_data' do
- subject(:data) { helper.incidents_data(project) }
+ subject(:data) { helper.incidents_data(project, params) }
it 'returns frontend configuration' do
expect(data).to match(
@@ -20,7 +27,10 @@ RSpec.describe Projects::IncidentsHelper do
'incident-template-name' => 'incident',
'incident-type' => 'incident',
'issue-path' => issue_path,
- 'empty-list-svg-path' => match_asset_path('/assets/illustrations/incident-empty-state.svg')
+ 'empty-list-svg-path' => match_asset_path('/assets/illustrations/incident-empty-state.svg'),
+ 'text-query': 'search text',
+ 'author-usernames-query': 'root',
+ 'assignee-usernames-query': 'max.power'
)
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index 2b345ff3ae6..f081cf225b1 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -5,16 +5,15 @@ require 'spec_helper'
RSpec.describe ProjectsHelper do
include ProjectForksHelper
- let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:project) { create(:project) }
+ let_it_be_with_refind(:project_with_repo) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- describe '#project_incident_management_setting' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
+ before do
+ helper.instance_variable_set(:@project, project)
+ end
+ describe '#project_incident_management_setting' do
context 'when incident_management_setting exists' do
let(:project_incident_management_setting) do
create(:project_incident_management_setting, project: project)
@@ -40,20 +39,14 @@ RSpec.describe ProjectsHelper do
end
describe '#error_tracking_setting_project_json' do
- let(:project) { create(:project) }
-
context 'error tracking setting does not exist' do
- before do
- helper.instance_variable_set(:@project, project)
- end
-
it 'returns nil' do
expect(helper.error_tracking_setting_project_json).to be_nil
end
end
context 'error tracking setting exists' do
- let!(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
+ let_it_be(:error_tracking_setting) { create(:project_error_tracking_setting, project: project) }
context 'api_url present' do
let(:json) do
@@ -65,24 +58,16 @@ RSpec.describe ProjectsHelper do
}.to_json
end
- before do
- helper.instance_variable_set(:@project, project)
- end
-
it 'returns error tracking json' do
expect(helper.error_tracking_setting_project_json).to eq(json)
end
end
context 'api_url not present' do
- before do
+ it 'returns nil' do
project.error_tracking_setting.api_url = nil
project.error_tracking_setting.enabled = false
- helper.instance_variable_set(:@project, project)
- end
-
- it 'returns nil' do
expect(helper.error_tracking_setting_project_json).to be_nil
end
end
@@ -98,8 +83,7 @@ RSpec.describe ProjectsHelper do
end
describe "can_change_visibility_level?" do
- let(:project) { create(:project) }
- let(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
+ let_it_be(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
let(:forked_project) { fork_project(project, user) }
it "returns false if there are no appropriate permissions" do
@@ -142,8 +126,7 @@ RSpec.describe ProjectsHelper do
end
describe '#can_disable_emails?' do
- let(:project) { create(:project) }
- let(:user) { create(:project_member, :maintainer, user: create(:user), project: project).user }
+ let_it_be(:user) { create(:project_member, :maintainer, user: create(:user), project: project).user }
it 'returns true for the project owner' do
allow(helper).to receive(:can?).with(project.owner, :set_emails_disabled, project) { true }
@@ -166,11 +149,7 @@ RSpec.describe ProjectsHelper do
end
describe "readme_cache_key" do
- let(:project) { create(:project, :repository) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
+ let(:project) { project_with_repo }
it "returns a valid cach key" do
expect(helper.send(:readme_cache_key)).to eq("#{project.full_path}-#{project.commit.id}-readme")
@@ -184,8 +163,7 @@ RSpec.describe ProjectsHelper do
end
describe "#project_list_cache_key", :clean_gitlab_redis_shared_state do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user) }
+ let(:project) { project_with_repo }
before do
allow(helper).to receive(:current_user).and_return(user)
@@ -249,8 +227,6 @@ RSpec.describe ProjectsHelper do
describe '#load_pipeline_status' do
it 'loads the pipeline status in batch' do
- project = build(:project)
-
helper.load_pipeline_status([project])
# Skip lazy loading of the `pipeline_status` attribute
pipeline_status = project.instance_variable_get('@pipeline_status')
@@ -260,8 +236,6 @@ RSpec.describe ProjectsHelper do
end
describe '#show_no_ssh_key_message?' do
- let(:user) { create(:user) }
-
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -282,8 +256,6 @@ RSpec.describe ProjectsHelper do
end
describe '#show_no_password_message?' do
- let(:user) { create(:user) }
-
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -424,7 +396,6 @@ RSpec.describe ProjectsHelper do
before do
allow(helper).to receive(:current_user).and_return(user)
- helper.instance_variable_set(:@project, project)
end
context 'when there is no current_user' do
@@ -444,9 +415,6 @@ RSpec.describe ProjectsHelper do
end
describe '#get_project_nav_tabs' do
- let_it_be(:user) { create(:user) }
- let(:project) { create(:project) }
-
before do
allow(helper).to receive(:can?) { true }
end
@@ -524,7 +492,14 @@ RSpec.describe ProjectsHelper do
subject { helper.send(:can_view_operations_tab?, user, project) }
- [:read_environment, :read_cluster, :metrics_dashboard].each do |ability|
+ [
+ :metrics_dashboard,
+ :read_alert_management_alert,
+ :read_environment,
+ :read_issue,
+ :read_sentry_issue,
+ :read_cluster
+ ].each do |ability|
it 'includes operations tab' do
allow(helper).to receive(:can?).and_return(false)
allow(helper).to receive(:can?).with(user, ability, project).and_return(true)
@@ -536,7 +511,6 @@ RSpec.describe ProjectsHelper do
describe '#show_projects' do
let(:projects) do
- create(:project)
Project.all
end
@@ -561,8 +535,8 @@ RSpec.describe ProjectsHelper do
end
end
- describe('#push_to_create_project_command') do
- let(:user) { create(:user, username: 'john') }
+ describe '#push_to_create_project_command' do
+ let(:user) { build_stubbed(:user, username: 'john') }
it 'returns the command to push to create project over HTTP' do
allow(Gitlab::CurrentSettings.current_application_settings).to receive(:enabled_git_access_protocol) { 'http' }
@@ -578,8 +552,6 @@ RSpec.describe ProjectsHelper do
end
describe '#any_projects?' do
- let!(:project) { create(:project) }
-
it 'returns true when projects will be returned' do
expect(helper.any_projects?(Project.all)).to eq(true)
end
@@ -609,7 +581,7 @@ RSpec.describe ProjectsHelper do
end
describe '#git_user_name' do
- let(:user) { double(:user, name: 'John "A" Doe53') }
+ let(:user) { build_stubbed(:user, name: 'John "A" Doe53') }
before do
allow(helper).to receive(:current_user).and_return(user)
@@ -632,8 +604,6 @@ RSpec.describe ProjectsHelper do
end
context 'user logged in' do
- let(:user) { create(:user) }
-
before do
allow(helper).to receive(:current_user).and_return(user)
end
@@ -658,7 +628,6 @@ RSpec.describe ProjectsHelper do
end
describe 'show_xcode_link' do
- let!(:project) { create(:project) }
let(:mac_ua) { 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36' }
let(:ios_ua) { 'Mozilla/5.0 (iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3' }
@@ -799,7 +768,7 @@ RSpec.describe ProjectsHelper do
describe '#show_auto_devops_implicitly_enabled_banner?' do
using RSpec::Parameterized::TableSyntax
- let(:user) { create(:user) }
+ let_it_be_with_reload(:project_with_auto_devops) { create(:project, :repository, :auto_devops) }
let(:feature_visibilities) do
{
@@ -873,9 +842,9 @@ RSpec.describe ProjectsHelper do
with_them do
let(:project) do
if project_setting.nil?
- create(:project, :repository)
+ project_with_repo
else
- create(:project, :repository, :auto_devops)
+ project_with_auto_devops
end
end
@@ -896,14 +865,8 @@ RSpec.describe ProjectsHelper do
end
describe '#can_import_members?' do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
let(:owner) { project.owner }
- before do
- helper.instance_variable_set(:@project, project)
- end
-
it 'returns false if user cannot admin_project_member' do
allow(helper).to receive(:current_user) { user }
expect(helper.can_import_members?).to eq false
@@ -916,12 +879,6 @@ RSpec.describe ProjectsHelper do
end
describe '#metrics_external_dashboard_url' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
context 'metrics_setting exists' do
it 'returns external_dashboard_url' do
metrics_setting = create(:project_metrics_setting, project: project)
@@ -938,12 +895,6 @@ RSpec.describe ProjectsHelper do
end
describe '#grafana_integration_url' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
subject { helper.grafana_integration_url }
it { is_expected.to eq(nil) }
@@ -956,12 +907,6 @@ RSpec.describe ProjectsHelper do
end
describe '#grafana_integration_token' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
subject { helper.grafana_integration_masked_token }
it { is_expected.to eq(nil) }
@@ -974,12 +919,6 @@ RSpec.describe ProjectsHelper do
end
describe '#grafana_integration_enabled?' do
- let(:project) { create(:project) }
-
- before do
- helper.instance_variable_set(:@project, project)
- end
-
subject { helper.grafana_integration_enabled? }
it { is_expected.to eq(nil) }
@@ -992,7 +931,6 @@ RSpec.describe ProjectsHelper do
end
describe '#project_license_name(project)', :request_store do
- let_it_be(:project) { create(:project) }
let_it_be(:repository) { project.repository }
subject { project_license_name(project) }
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 594c5c11994..341606635c0 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -357,14 +357,6 @@ RSpec.describe SearchHelper do
describe '#show_user_search_tab?' do
subject { show_user_search_tab? }
- context 'when users_search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
- end
-
- it { is_expected.to eq(false) }
- end
-
context 'when project search' do
before do
@project = :some_project
@@ -399,4 +391,25 @@ RSpec.describe SearchHelper do
end
end
end
+
+ describe '#repository_ref' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:params) { { repository_ref: 'the-repository-ref-param' } }
+
+ subject { repository_ref(project) }
+
+ it { is_expected.to eq('the-repository-ref-param') }
+
+ context 'when the param :repository_ref is not set' do
+ let(:params) { { repository_ref: nil } }
+
+ it { is_expected.to eq(project.default_branch) }
+ end
+
+ context 'when the repository_ref param is a number' do
+ let(:params) { { repository_ref: 111111 } }
+
+ it { is_expected.to eq('111111') }
+ end
+ end
end
diff --git a/spec/helpers/tree_helper_spec.rb b/spec/helpers/tree_helper_spec.rb
index 97b6802dde9..d347d8e6bfa 100644
--- a/spec/helpers/tree_helper_spec.rb
+++ b/spec/helpers/tree_helper_spec.rb
@@ -178,7 +178,7 @@ RSpec.describe TreeHelper do
it 'returns a list of attributes related to the project' do
expect(subject).to include(
- ide_base_path: project.full_path,
+ web_ide_url_data: { path: project.full_path, is_fork: false },
needs_to_fork: false,
show_web_ide_button: true,
show_gitpod_button: false,
@@ -200,9 +200,9 @@ RSpec.describe TreeHelper do
allow(helper).to receive(:current_user).and_return(user)
end
- it 'includes ide_base_path: forked_project.full_path' do
+ it 'includes web_ide_url_data with the forked project path' do
expect(subject).to include(
- ide_base_path: forked_project.full_path
+ web_ide_url_data: { path: forked_project.full_path, is_fork: true }
)
end
end
@@ -216,9 +216,9 @@ RSpec.describe TreeHelper do
allow(helper).to receive(:current_user).and_return(user)
end
- it 'includes ide_base_path: project.full_path' do
+ it 'includes web_ide_url_data with the project path' do
expect(subject).to include(
- ide_base_path: project.full_path
+ web_ide_url_data: { path: project.full_path, is_fork: false }
)
end
end
diff --git a/spec/helpers/whats_new_helper_spec.rb b/spec/helpers/whats_new_helper_spec.rb
index db880163454..7d8fb236b1e 100644
--- a/spec/helpers/whats_new_helper_spec.rb
+++ b/spec/helpers/whats_new_helper_spec.rb
@@ -3,6 +3,52 @@
require 'spec_helper'
RSpec.describe WhatsNewHelper do
+ describe '#whats_new_storage_key' do
+ subject { helper.whats_new_storage_key }
+
+ before do
+ allow(helper).to receive(:whats_new_most_recent_release_items).and_return(json)
+ end
+
+ context 'when recent release items exist' do
+ let(:json) { [{ release: 84.0 }].to_json }
+
+ it { is_expected.to eq('display-whats-new-notification-84.0') }
+
+ context 'when the release items are missing the release key' do
+ let(:json) { [{ title: 'bells!' }].to_json }
+
+ it { is_expected.to eq('display-whats-new-notification') }
+ end
+ end
+
+ context 'when recent release items do NOT exist' do
+ let(:json) { WhatsNewHelper::EMPTY_JSON }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#whats_new_most_recent_release_items_count' do
+ subject { helper.whats_new_most_recent_release_items_count }
+
+ before do
+ allow(helper).to receive(:whats_new_most_recent_release_items).and_return(json)
+ end
+
+ context 'when recent release items exist' do
+ let(:json) { [:bells, :and, :whistles].to_json }
+
+ it { is_expected.to eq(3) }
+ end
+
+ context 'when recent release items do NOT exist' do
+ let(:json) { WhatsNewHelper::EMPTY_JSON }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#whats_new_most_recent_release_items' do
let(:fixture_dir_glob) { Dir.glob(File.join('spec', 'fixtures', 'whats_new', '*.yml')) }
diff --git a/spec/lib/api/github/entities_spec.rb b/spec/lib/api/github/entities_spec.rb
new file mode 100644
index 00000000000..00ea60c5d65
--- /dev/null
+++ b/spec/lib/api/github/entities_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Github::Entities do
+ describe API::Github::Entities::User do
+ let(:user) { create(:user, username: username) }
+ let(:username) { 'name_of_user' }
+ let(:gitlab_protocol_and_host) { "#{Gitlab.config.gitlab.protocol}://#{Gitlab.config.gitlab.host}" }
+ let(:expected_user_url) { "#{gitlab_protocol_and_host}/#{username}" }
+ let(:entity) { described_class.new(user) }
+
+ subject { entity.as_json }
+
+ specify :aggregate_failures do
+ expect(subject[:id]).to eq user.id
+ expect(subject[:login]).to eq 'name_of_user'
+ expect(subject[:url]).to eq expected_user_url
+ expect(subject[:html_url]).to eq expected_user_url
+ expect(subject[:avatar_url]).to include('https://www.gravatar.com/avatar')
+ end
+
+ context 'with avatar' do
+ let(:user) { create(:user, :with_avatar, username: username) }
+
+ specify do
+ expect(subject[:avatar_url]).to include("#{gitlab_protocol_and_host}/uploads/-/system/user/avatar/")
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
index ccf96bcbad6..6d06fc3618d 100644
--- a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
shared_examples 'executing redirect' do
it 'redirects to package registry' do
+ expect(helper).to receive(:track_event).with('npm_request_forward').once
expect(helper).to receive(:registry_url).once
expect(helper).to receive(:redirect).once
expect(helper).to receive(:fallback).never
@@ -63,6 +64,7 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
let(:package_type) { pkg_type }
it 'raises an error' do
+ allow(helper).to receive(:track_event)
expect { subject }.to raise_error(ArgumentError, "Can't build registry_url for package_type #{package_type}")
end
end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 51a45dff6a4..8e738af0fa3 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -191,41 +191,32 @@ RSpec.describe API::Helpers do
describe '#increment_unique_values' do
let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
- let(:event_name) { 'my_event' }
+ let(:event_name) { 'g_compliance_dashboard' }
let(:unknown_event) { 'unknown' }
let(:feature) { "usage_data_#{event_name}" }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
context 'with feature enabled' do
before do
stub_feature_flags(feature => true)
end
it 'tracks redis hll event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
subject.increment_unique_values(event_name, value)
end
- it 'does not track event usage ping is not enabled' do
- stub_application_setting(usage_ping_enabled: false)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- subject.increment_unique_values(event_name, value)
- end
-
it 'logs an exception for unknown event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::AppLogger).to receive(:warn).with("Redis tracking event failed for event: #{unknown_event}, message: Unknown event #{unknown_event}")
subject.increment_unique_values(unknown_event, value)
end
it 'does not track event for nil values' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
subject.increment_unique_values(unknown_event, nil)
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
index c2dbaac7f15..45cc73974d6 100644
--- a/spec/lib/backup/files_spec.rb
+++ b/spec/lib/backup/files_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Backup::Files do
let(:timestamp) { Time.utc(2017, 3, 22) }
around do |example|
- Timecop.freeze(timestamp) { example.run }
+ travel_to(timestamp) { example.run }
end
describe 'folders with permission' do
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repositories_spec.rb
index 718f38f9452..540c64e74ca 100644
--- a/spec/lib/backup/repository_spec.rb
+++ b/spec/lib/backup/repositories_spec.rb
@@ -2,9 +2,7 @@
require 'spec_helper'
-RSpec.describe Backup::Repository do
- let_it_be(:project) { create(:project, :wiki_repo) }
-
+RSpec.describe Backup::Repositories do
let(:progress) { StringIO.new }
subject { described_class.new(progress) }
@@ -12,7 +10,6 @@ RSpec.describe Backup::Repository do
before do
allow(progress).to receive(:puts)
allow(progress).to receive(:print)
- allow(FileUtils).to receive(:mv).and_return(true)
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:progress).and_return(progress)
@@ -20,13 +17,33 @@ RSpec.describe Backup::Repository do
end
describe '#dump' do
- before do
- allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys)
+ let_it_be(:projects) { create_list(:project, 5, :repository) }
+
+ RSpec.shared_examples 'creates repository bundles' do
+ specify :aggregate_failures do
+ # Add data to the wiki and design repositories, so they will be included in the dump.
+ create(:wiki_page, container: project)
+ create(:design, :with_file, issue: create(:issue, project: project))
+
+ subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
+
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.bundle'))
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.wiki' + '.bundle'))
+ expect(File).to exist(File.join(Gitlab.config.backup.path, 'repositories', project.disk_path + '.design' + '.bundle'))
+ end
end
- let_it_be(:projects) { create_list(:project, 5, :wiki_repo) + [project] }
+ context 'hashed storage' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ it_behaves_like 'creates repository bundles'
+ end
- let(:storage_keys) { %w[default test_second_storage] }
+ context 'legacy storage' do
+ let_it_be(:project) { create(:project, :repository, :legacy_storage) }
+
+ it_behaves_like 'creates repository bundles'
+ end
context 'no concurrency' do
it 'creates the expected number of threads' do
@@ -58,7 +75,7 @@ RSpec.describe Backup::Repository do
subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
end.count
- create_list(:project, 2, :wiki_repo)
+ create_list(:project, 2, :repository)
expect do
subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
@@ -68,6 +85,12 @@ RSpec.describe Backup::Repository do
[4, 10].each do |max_storage_concurrency|
context "max_storage_concurrency #{max_storage_concurrency}", quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/241701' do
+ let(:storage_keys) { %w[default test_second_storage] }
+
+ before do
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys)
+ end
+
it 'creates the expected number of threads' do
expect(Thread).to receive(:new)
.exactly(storage_keys.length * (max_storage_concurrency + 1)).times
@@ -120,7 +143,7 @@ RSpec.describe Backup::Repository do
subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
end.count
- create_list(:project, 2, :wiki_repo)
+ create_list(:project, 2, :repository)
expect do
subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
@@ -131,45 +154,55 @@ RSpec.describe Backup::Repository do
end
describe '#restore' do
- let(:timestamp) { Time.utc(2017, 3, 22) }
- let(:temp_dirs) do
- Gitlab.config.repositories.storages.map do |name, storage|
- Gitlab::GitalyClient::StorageSettings.allow_disk_access do
- File.join(storage.legacy_disk_path, '..', 'repositories.old.' + timestamp.to_i.to_s)
- end
+ let_it_be(:project) { create(:project) }
+
+ it 'restores repositories from bundles', :aggregate_failures do
+ next_path_to_bundle = [
+ Rails.root.join('spec/fixtures/lib/backup/project_repo.bundle'),
+ Rails.root.join('spec/fixtures/lib/backup/wiki_repo.bundle'),
+ Rails.root.join('spec/fixtures/lib/backup/design_repo.bundle')
+ ].to_enum
+
+ allow_next_instance_of(described_class::BackupRestore) do |backup_restore|
+ allow(backup_restore).to receive(:path_to_bundle).and_return(next_path_to_bundle.next)
end
- end
- around do |example|
- Timecop.freeze(timestamp) { example.run }
- end
+ subject.restore
+
+ collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
- after do
- temp_dirs.each { |path| FileUtils.rm_rf(path) }
+ expect(collect_commit_shas.call(project.repository)).to eq(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
+ expect(collect_commit_shas.call(project.wiki.repository)).to eq(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
+ expect(collect_commit_shas.call(project.design_repository)).to eq(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
end
describe 'command failure' do
before do
- # Allow us to set expectations on the project directly
expect(Project).to receive(:find_each).and_yield(project)
- expect(project.repository).to receive(:create_repository) { raise 'Fail in tests' }
+
+ allow_next_instance_of(DesignManagement::Repository) do |repository|
+ allow(repository).to receive(:create_repository) { raise 'Fail in tests' }
+ end
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:create_repository) { raise 'Fail in tests' }
+ end
end
context 'hashed storage' do
it 'shows the appropriate error' do
subject.restore
- expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository")
+ expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})")
end
end
context 'legacy storage' do
- let!(:project) { create(:project, :legacy_storage) }
+ let_it_be(:project) { create(:project, :legacy_storage) }
it 'shows the appropriate error' do
subject.restore
- expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} repository")
+ expect(progress).to have_received(:puts).with("[Failed] restoring #{project.full_path} (#{project.disk_path})")
end
end
end
@@ -188,45 +221,18 @@ RSpec.describe Backup::Repository do
end
it 'cleans existing repositories' do
- wiki_repository_spy = spy(:wiki)
-
- allow_next_instance_of(ProjectWiki) do |project_wiki|
- allow(project_wiki).to receive(:repository).and_return(wiki_repository_spy)
- end
-
- expect_next_instance_of(Repository) do |repo|
- expect(repo).to receive(:remove)
+ expect_next_instance_of(DesignManagement::Repository) do |repository|
+ expect(repository).to receive(:remove)
end
+ expect(Repository).to receive(:new).twice.and_wrap_original do |method, *original_args|
+ repository = method.call(*original_args)
- subject.restore
-
- expect(wiki_repository_spy).to have_received(:remove)
- end
- end
+ expect(repository).to receive(:remove)
- describe '#empty_repo?' do
- context 'for a wiki' do
- let(:wiki) { create(:project_wiki) }
-
- it 'invalidates the emptiness cache' do
- expect(wiki.repository).to receive(:expire_emptiness_caches).once
-
- subject.send(:empty_repo?, wiki)
+ repository
end
- context 'wiki repo has content' do
- let!(:wiki_page) { create(:wiki_page, wiki: wiki) }
-
- it 'returns true, regardless of bad cache value' do
- expect(subject.send(:empty_repo?, wiki)).to be(false)
- end
- end
-
- context 'wiki repo does not have content' do
- it 'returns true, regardless of bad cache value' do
- expect(subject.send(:empty_repo?, wiki)).to be_truthy
- end
- end
+ subject.restore
end
end
end
diff --git a/spec/lib/banzai/filter/design_reference_filter_spec.rb b/spec/lib/banzai/filter/design_reference_filter_spec.rb
index 1b558754932..847c398964a 100644
--- a/spec/lib/banzai/filter/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/design_reference_filter_spec.rb
@@ -74,26 +74,6 @@ RSpec.describe Banzai::Filter::DesignReferenceFilter do
it_behaves_like 'a no-op filter'
end
-
- context 'design reference filter is not enabled' do
- before do
- stub_feature_flags(described_class::FEATURE_FLAG => false)
- end
-
- it_behaves_like 'a no-op filter'
-
- it 'issues no queries' do
- expect { process(input_text) }.not_to exceed_query_limit(0)
- end
- end
-
- context 'the filter is enabled for the context project' do
- before do
- stub_feature_flags(described_class::FEATURE_FLAG => project)
- end
-
- it_behaves_like 'a good link reference'
- end
end
end
diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
index e7b6c910b8a..35ef2abfa63 100644
--- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
include FilterSpecHelper
+ let_it_be_with_refind(:project) { create(:project) }
+
shared_examples_for "external issue tracker" do
it_behaves_like 'a reference containing an element node'
@@ -116,7 +118,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "redmine project" do
- let(:project) { create(:redmine_project) }
+ let_it_be(:service) { create(:redmine_service, project: project) }
before do
project.update!(issues_enabled: false)
@@ -138,7 +140,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "youtrack project" do
- let(:project) { create(:youtrack_project) }
+ let_it_be(:service) { create(:youtrack_service, project: project) }
before do
project.update!(issues_enabled: false)
@@ -181,7 +183,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "jira project" do
- let(:project) { create(:jira_project) }
+ let_it_be(:service) { create(:jira_service, project: project) }
let(:reference) { issue.to_reference }
context "with right markdown" do
@@ -210,7 +212,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
end
context "ewm project" do
- let_it_be(:project) { create(:ewm_project) }
+ let_it_be(:service) { create(:ewm_service, project: project) }
before do
project.update!(issues_enabled: false)
diff --git a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
index 8bdb24ab08c..d29af311ee5 100644
--- a/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_grafana_metrics_filter_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Banzai::Filter::InlineGrafanaMetricsFilter do
it_behaves_like 'a metrics embed filter'
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
context 'when grafana is not configured' do
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
index 447802d18a7..4b8b575c1f0 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/issue_reference_filter_spec.rb
@@ -296,6 +296,12 @@ RSpec.describe Banzai::Filter::IssueReferenceFilter do
.to eq reference
end
+ it 'links with a trailing slash' do
+ doc = reference_filter("Fixed (#{issue_url + "/"}.)")
+
+ expect(doc.to_html).to match(%r{\(<a.+>#{Regexp.escape(issue.to_reference(project))}</a>\.\)})
+ end
+
it 'links with adjacent text' do
doc = reference_filter("Fixed (#{reference}.)")
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
index 62b1711ee57..276fa7952be 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
@@ -5,9 +5,11 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
include FilterSpecHelper
- let(:parent_group) { create(:group, :public) }
- let(:group) { create(:group, :public, parent: parent_group) }
- let(:project) { create(:project, :public, group: group) }
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:group) { create(:group, :public, parent: parent_group) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+ let_it_be(:namespace) { create(:namespace) }
+ let_it_be(:another_project) { create(:project, :public, namespace: namespace) }
it 'requires project context' do
expect { described_class.call('') }.to raise_error(ArgumentError, /:project/)
@@ -188,11 +190,9 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross-project / cross-namespace complete reference' do
- let(:namespace) { create(:namespace) }
- let(:another_project) { create(:project, :public, namespace: namespace) }
- let(:milestone) { create(:milestone, project: another_project) }
- let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
- let!(:result) { reference_filter("See #{reference}") }
+ let_it_be(:milestone) { create(:milestone, project: another_project) }
+ let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
+ let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@@ -226,12 +226,10 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross-project / same-namespace complete reference' do
- let(:namespace) { create(:namespace) }
- let(:project) { create(:project, :public, namespace: namespace) }
- let(:another_project) { create(:project, :public, namespace: namespace) }
- let(:milestone) { create(:milestone, project: another_project) }
- let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
- let!(:result) { reference_filter("See #{reference}") }
+ let_it_be(:project) { create(:project, :public, namespace: namespace) }
+ let_it_be(:milestone) { create(:milestone, project: another_project) }
+ let(:reference) { "#{another_project.full_path}%#{milestone.iid}" }
+ let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@@ -265,12 +263,10 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
end
shared_examples 'cross project shorthand reference' do
- let(:namespace) { create(:namespace) }
- let(:project) { create(:project, :public, namespace: namespace) }
- let(:another_project) { create(:project, :public, namespace: namespace) }
- let(:milestone) { create(:milestone, project: another_project) }
- let(:reference) { "#{another_project.path}%#{milestone.iid}" }
- let!(:result) { reference_filter("See #{reference}") }
+ let_it_be(:project) { create(:project, :public, namespace: namespace) }
+ let_it_be(:milestone) { create(:milestone, project: another_project) }
+ let(:reference) { "#{another_project.path}%#{milestone.iid}" }
+ let!(:result) { reference_filter("See #{reference}") }
it 'points to referenced project milestone page' do
expect(result.css('a').first.attr('href')).to eq urls
@@ -439,13 +435,13 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
context 'when milestone is open' do
context 'project milestones' do
- let(:milestone) { create(:milestone, project: project) }
+ let_it_be_with_reload(:milestone) { create(:milestone, project: project) }
include_context 'project milestones'
end
context 'group milestones' do
- let(:milestone) { create(:milestone, group: group) }
+ let_it_be_with_reload(:milestone) { create(:milestone, group: group) }
include_context 'group milestones'
end
@@ -453,13 +449,13 @@ RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
context 'when milestone is closed' do
context 'project milestones' do
- let(:milestone) { create(:milestone, :closed, project: project) }
+ let_it_be_with_reload(:milestone) { create(:milestone, :closed, project: project) }
include_context 'project milestones'
end
context 'group milestones' do
- let(:milestone) { create(:milestone, :closed, group: group) }
+ let_it_be_with_reload(:milestone) { create(:milestone, :closed, group: group) }
include_context 'group milestones'
end
diff --git a/spec/lib/feature/definition_spec.rb b/spec/lib/feature/definition_spec.rb
index 49224cf4279..fa0207d829a 100644
--- a/spec/lib/feature/definition_spec.rb
+++ b/spec/lib/feature/definition_spec.rb
@@ -105,6 +105,7 @@ RSpec.describe Feature::Definition do
describe '.load_all!' do
let(:store1) { Dir.mktmpdir('path1') }
let(:store2) { Dir.mktmpdir('path2') }
+ let(:definitions) { {} }
before do
allow(described_class).to receive(:paths).and_return(
@@ -115,28 +116,30 @@ RSpec.describe Feature::Definition do
)
end
+ subject { described_class.send(:load_all!) }
+
it "when there's no feature flags a list of definitions is empty" do
- expect(described_class.load_all!).to be_empty
+ is_expected.to be_empty
end
it "when there's a single feature flag it properly loads them" do
write_feature_flag(store1, path, yaml_content)
- expect(described_class.load_all!).to be_one
+ is_expected.to be_one
end
it "when the same feature flag is stored multiple times raises exception" do
write_feature_flag(store1, path, yaml_content)
write_feature_flag(store2, path, yaml_content)
- expect { described_class.load_all! }
+ expect { subject }
.to raise_error(/Feature flag 'feature_flag' is already defined/)
end
it "when one of the YAMLs is invalid it does raise exception" do
write_feature_flag(store1, path, '{}')
- expect { described_class.load_all! }
+ expect { subject }
.to raise_error(/Feature flag is missing name/)
end
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index acd7d97ac85..5dff9dbd995 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Feature, stub_feature_flags: false do
before do
# reset Flipper AR-engine
Feature.reset
+ skip_feature_flags_yaml_validation
end
describe '.get' do
@@ -253,6 +254,9 @@ RSpec.describe Feature, stub_feature_flags: false do
end
before do
+ stub_env('LAZILY_CREATE_FEATURE_FLAG', '0')
+
+ allow(Feature::Definition).to receive(:valid_usage!).and_call_original
allow(Feature::Definition).to receive(:definitions) do
{ definition.key => definition }
end
diff --git a/spec/lib/gitlab/alert_management/alert_params_spec.rb b/spec/lib/gitlab/alert_management/alert_params_spec.rb
deleted file mode 100644
index c3171be5e29..00000000000
--- a/spec/lib/gitlab/alert_management/alert_params_spec.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::AlertManagement::AlertParams do
- let_it_be(:project) { create(:project, :repository, :private) }
-
- describe '.from_generic_alert' do
- let(:started_at) { Time.current.change(usec: 0).rfc3339 }
- let(:default_payload) do
- {
- 'title' => 'Alert title',
- 'description' => 'Description',
- 'monitoring_tool' => 'Monitoring tool name',
- 'service' => 'Service',
- 'hosts' => ['gitlab.com'],
- 'start_time' => started_at,
- 'some' => { 'extra' => { 'payload' => 'here' } }
- }
- end
-
- let(:payload) { default_payload }
-
- subject { described_class.from_generic_alert(project: project, payload: payload) }
-
- it 'returns Alert compatible parameters' do
- is_expected.to eq(
- project_id: project.id,
- title: 'Alert title',
- description: 'Description',
- monitoring_tool: 'Monitoring tool name',
- service: 'Service',
- severity: 'critical',
- hosts: ['gitlab.com'],
- payload: payload,
- started_at: started_at,
- ended_at: nil,
- fingerprint: nil,
- environment: nil
- )
- end
-
- context 'when severity given' do
- let(:payload) { default_payload.merge(severity: 'low') }
-
- it 'returns Alert compatible parameters' do
- expect(subject[:severity]).to eq('low')
- end
- end
-
- context 'when there are no hosts in the payload' do
- let(:payload) { {} }
-
- it 'hosts param is an empty array' do
- expect(subject[:hosts]).to be_empty
- end
- end
- end
-
- describe '.from_prometheus_alert' do
- let(:payload) do
- {
- 'status' => 'firing',
- 'labels' => {
- 'alertname' => 'GitalyFileServerDown',
- 'channel' => 'gitaly',
- 'pager' => 'pagerduty',
- 'severity' => 's1'
- },
- 'annotations' => {
- 'description' => 'Alert description',
- 'runbook' => 'troubleshooting/gitaly-down.md',
- 'title' => 'Alert title'
- },
- 'startsAt' => '2020-04-27T10:10:22.265949279Z',
- 'endsAt' => '0001-01-01T00:00:00Z',
- 'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1',
- 'fingerprint' => 'b6ac4d42057c43c1'
- }
- end
-
- let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) }
-
- subject { described_class.from_prometheus_alert(project: project, parsed_alert: parsed_alert) }
-
- it 'returns Alert-compatible params' do
- is_expected.to eq(
- project_id: project.id,
- title: 'Alert title',
- description: 'Alert description',
- monitoring_tool: 'Prometheus',
- payload: payload,
- started_at: parsed_alert.starts_at,
- ended_at: parsed_alert.ends_at,
- fingerprint: parsed_alert.gitlab_fingerprint,
- environment: parsed_alert.environment,
- prometheus_alert: parsed_alert.gitlab_alert
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/alert_management/payload/generic_spec.rb b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
index 538a822503e..b7660462b0d 100644
--- a/spec/lib/gitlab/alert_management/payload/generic_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::AlertManagement::Payload::Generic do
subject { parsed_payload.starts_at }
around do |example|
- Timecop.freeze(current_time) { example.run }
+ travel_to(current_time) { example.run }
end
context 'without start_time' do
@@ -86,4 +86,34 @@ RSpec.describe Gitlab::AlertManagement::Payload::Generic do
it_behaves_like 'parsable alert payload field', 'gitlab_environment_name'
end
+
+ describe '#description' do
+ subject { parsed_payload.description }
+
+ it_behaves_like 'parsable alert payload field', 'description'
+ end
+
+ describe '#ends_at' do
+ let(:current_time) { Time.current.change(usec: 0).utc }
+
+ subject { parsed_payload.ends_at }
+
+ around do |example|
+ travel_to(current_time) { example.run }
+ end
+
+ context 'without end_time' do
+ it { is_expected.to be_nil }
+ end
+
+ context "with end_time" do
+ let(:value) { 10.minutes.ago.change(usec: 0).utc }
+
+ before do
+ raw_payload['end_time'] = value.to_s
+ end
+
+ it { is_expected.to eq(value) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
deleted file mode 100644
index b53b71e3f3e..00000000000
--- a/spec/lib/gitlab/alerting/alert_spec.rb
+++ /dev/null
@@ -1,299 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Alerting::Alert do
- let_it_be(:project) { create(:project) }
-
- let(:alert) { build(:alerting_alert, project: project, payload: payload) }
- let(:payload) { {} }
-
- shared_context 'gitlab alert' do
- let!(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let(:gitlab_alert_id) { gitlab_alert.id }
-
- before do
- payload['labels'] = {
- 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id.to_s,
- 'gitlab_prometheus_alert_id' => gitlab_alert_id
- }
- end
- end
-
- shared_context 'full query' do
- before do
- payload['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29'
- end
- end
-
- shared_examples 'invalid alert' do
- it 'is invalid' do
- expect(alert).not_to be_valid
- end
- end
-
- shared_examples 'parse payload' do |*pairs|
- context 'without payload' do
- it { is_expected.to be_nil }
- end
-
- pairs.each do |pair|
- context "with #{pair}" do
- let(:value) { 'some value' }
-
- before do
- section, name = pair.split('/')
- payload[section] = { name => value }
- end
-
- it { is_expected.to eq(value) }
- end
- end
- end
-
- describe '#gitlab_alert' do
- subject { alert.gitlab_alert }
-
- context 'without payload' do
- it { is_expected.to be_nil }
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- it { is_expected.to eq(gitlab_alert) }
- end
-
- context 'with unknown gitlab alert' do
- include_context 'gitlab alert' do
- let(:gitlab_alert_id) { 'unknown' }
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'when two alerts with the same metric exist' do
- include_context 'gitlab alert'
-
- let!(:second_gitlab_alert) do
- create(:prometheus_alert,
- project: project,
- prometheus_metric_id: gitlab_alert.prometheus_metric_id
- )
- end
-
- context 'alert id given in params' do
- before do
- payload['labels'] = {
- 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id.to_s,
- 'gitlab_prometheus_alert_id' => second_gitlab_alert.id
- }
- end
-
- it { is_expected.to eq(second_gitlab_alert) }
- end
-
- context 'metric id given in params' do
- # This tests the case when two alerts are found, as metric id
- # is not unique.
-
- # Note the metric id was incorrectly named as 'gitlab_alert_id'
- # in PrometheusAlert#to_param.
- before do
- payload['labels'] = { 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id }
- end
-
- it { is_expected.to be_nil }
- end
- end
- end
-
- describe '#title' do
- subject { alert.title }
-
- it_behaves_like 'parse payload',
- 'annotations/title',
- 'annotations/summary',
- 'labels/alertname'
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- context 'with annotations/title' do
- let(:value) { 'annotation title' }
-
- before do
- payload['annotations'] = { 'title' => value }
- end
-
- it { is_expected.to eq(gitlab_alert.title) }
- end
- end
- end
-
- describe '#description' do
- subject { alert.description }
-
- it_behaves_like 'parse payload', 'annotations/description'
- end
-
- describe '#annotations' do
- subject { alert.annotations }
-
- context 'without payload' do
- it { is_expected.to eq([]) }
- end
-
- context 'with payload' do
- before do
- payload['annotations'] = { 'foo' => 'value1', 'bar' => 'value2' }
- end
-
- it 'parses annotations' do
- expect(subject.size).to eq(2)
- expect(subject.map(&:label)).to eq(%w[foo bar])
- expect(subject.map(&:value)).to eq(%w[value1 value2])
- end
- end
- end
-
- describe '#environment' do
- subject { alert.environment }
-
- context 'without gitlab_alert' do
- it { is_expected.to be_nil }
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- it { is_expected.to eq(gitlab_alert.environment) }
- end
- end
-
- describe '#starts_at' do
- subject { alert.starts_at }
-
- context 'with empty startsAt' do
- before do
- payload['startsAt'] = nil
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'with invalid startsAt' do
- before do
- payload['startsAt'] = 'invalid'
- end
-
- it { is_expected.to be_nil }
- end
-
- context 'with payload' do
- let(:time) { Time.current.change(usec: 0) }
-
- before do
- payload['startsAt'] = time.rfc3339
- end
-
- it { is_expected.to eq(time) }
- end
- end
-
- describe '#full_query' do
- using RSpec::Parameterized::TableSyntax
-
- subject { alert.full_query }
-
- where(:generator_url, :expected_query) do
- nil | nil
- 'http://localhost' | nil
- 'invalid url' | nil
- 'http://localhost:9090/graph?g1.expr=vector%281%29' | nil
- 'http://localhost:9090/graph?g0.expr=vector%281%29' | 'vector(1)'
- end
-
- with_them do
- before do
- payload['generatorURL'] = generator_url
- end
-
- it { is_expected.to eq(expected_query) }
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
- include_context 'full query'
-
- it { is_expected.to eq(gitlab_alert.full_query) }
- end
- end
-
- describe '#y_label' do
- subject { alert.y_label }
-
- it_behaves_like 'parse payload', 'annotations/gitlab_y_label'
-
- context 'when y_label is not included in the payload' do
- it_behaves_like 'parse payload', 'annotations/title'
- end
- end
-
- describe '#alert_markdown' do
- subject { alert.alert_markdown }
-
- it_behaves_like 'parse payload', 'annotations/gitlab_incident_markdown'
- end
-
- describe '#gitlab_fingerprint' do
- subject { alert.gitlab_fingerprint }
-
- context 'when the alert is a GitLab managed alert' do
- include_context 'gitlab alert'
-
- it 'returns a fingerprint' do
- plain_fingerprint = [alert.metric_id, alert.starts_at_raw].join('/')
-
- is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
- end
- end
-
- context 'when the alert is from self managed Prometheus' do
- include_context 'full query'
-
- it 'returns a fingerprint' do
- plain_fingerprint = [alert.starts_at_raw, alert.title, alert.full_query].join('/')
-
- is_expected.to eq(Digest::SHA1.hexdigest(plain_fingerprint))
- end
- end
- end
-
- describe '#valid?' do
- before do
- payload.update(
- 'annotations' => { 'title' => 'some title' },
- 'startsAt' => Time.current.rfc3339
- )
- end
-
- subject { alert }
-
- it { is_expected.to be_valid }
-
- context 'without project' do
- let(:project) { nil }
-
- it { is_expected.not_to be_valid }
- end
-
- context 'without starts_at' do
- before do
- payload['startsAt'] = nil
- end
-
- it { is_expected.not_to be_valid }
- end
- end
-end
diff --git a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb b/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
deleted file mode 100644
index ff5ab1116fa..00000000000
--- a/spec/lib/gitlab/alerting/notification_payload_parser_spec.rb
+++ /dev/null
@@ -1,204 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Alerting::NotificationPayloadParser do
- let_it_be(:project) { build(:project) }
-
- describe '.call' do
- let(:starts_at) { Time.current.change(usec: 0) }
- let(:ends_at) { Time.current.change(usec: 0) }
- let(:payload) do
- {
- 'title' => 'alert title',
- 'start_time' => starts_at.rfc3339,
- 'end_time' => ends_at.rfc3339,
- 'description' => 'Description',
- 'monitoring_tool' => 'Monitoring tool name',
- 'service' => 'Service',
- 'hosts' => ['gitlab.com'],
- 'severity' => 'low'
- }
- end
-
- subject { described_class.call(payload, project) }
-
- it 'returns Prometheus-like payload' do
- is_expected.to eq(
- {
- 'annotations' => {
- 'title' => 'alert title',
- 'description' => 'Description',
- 'monitoring_tool' => 'Monitoring tool name',
- 'service' => 'Service',
- 'hosts' => ['gitlab.com'],
- 'severity' => 'low'
- },
- 'startsAt' => starts_at.rfc3339,
- 'endsAt' => ends_at.rfc3339
- }
- )
- end
-
- context 'when title is blank' do
- before do
- payload[:title] = ''
- end
-
- it 'sets a predefined title' do
- expect(subject.dig('annotations', 'title')).to eq('New: Incident')
- end
- end
-
- context 'when hosts attribute is a string' do
- before do
- payload[:hosts] = 'gitlab.com'
- end
-
- it 'returns hosts as an array of one element' do
- expect(subject.dig('annotations', 'hosts')).to eq(['gitlab.com'])
- end
- end
-
- context 'when the time is in unsupported format' do
- before do
- payload[:start_time] = 'invalid/date/format'
- end
-
- it 'sets startsAt to a current time in RFC3339 format' do
- expect(subject['startsAt']).to eq(starts_at.rfc3339)
- end
- end
-
- context 'when payload is blank' do
- let(:payload) { {} }
-
- it 'returns default parameters' do
- is_expected.to match(
- 'annotations' => {
- 'title' => described_class::DEFAULT_TITLE,
- 'severity' => described_class::DEFAULT_SEVERITY
- },
- 'startsAt' => starts_at.rfc3339
- )
- end
-
- context 'when severity is blank' do
- before do
- payload[:severity] = ''
- end
-
- it 'sets severity to the default ' do
- expect(subject.dig('annotations', 'severity')).to eq(described_class::DEFAULT_SEVERITY)
- end
- end
- end
-
- context 'with fingerprint' do
- before do
- payload[:fingerprint] = data
- end
-
- shared_examples 'fingerprint generation' do
- it 'generates the fingerprint correctly' do
- expect(result).to eq(Gitlab::AlertManagement::Fingerprint.generate(data))
- end
- end
-
- context 'with blank fingerprint' do
- it_behaves_like 'fingerprint generation' do
- let(:data) { ' ' }
- let(:result) { subject.dig('annotations', 'fingerprint') }
- end
- end
-
- context 'with fingerprint given' do
- it_behaves_like 'fingerprint generation' do
- let(:data) { 'fingerprint' }
- let(:result) { subject.dig('annotations', 'fingerprint') }
- end
- end
-
- context 'with array fingerprint given' do
- it_behaves_like 'fingerprint generation' do
- let(:data) { [1, 'fingerprint', 'given'] }
- let(:result) { subject.dig('annotations', 'fingerprint') }
- end
- end
- end
-
- context 'with environment' do
- let(:environment) { create(:environment, project: project) }
-
- before do
- payload[:gitlab_environment_name] = environment.name
- end
-
- it 'sets the environment ' do
- expect(subject.dig('annotations', 'environment')).to eq(environment)
- end
- end
-
- context 'when payload attributes have blank lines' do
- let(:payload) do
- {
- 'title' => '',
- 'start_time' => '',
- 'end_time' => '',
- 'description' => '',
- 'monitoring_tool' => '',
- 'service' => '',
- 'hosts' => ['']
- }
- end
-
- it 'returns default parameters' do
- is_expected.to eq(
- 'annotations' => {
- 'title' => 'New: Incident',
- 'severity' => described_class::DEFAULT_SEVERITY
- },
- 'startsAt' => starts_at.rfc3339
- )
- end
- end
-
- context 'when payload has secondary params' do
- let(:payload) do
- {
- 'description' => 'Description',
- 'additional' => {
- 'params' => {
- '1' => 'Some value 1',
- '2' => 'Some value 2',
- 'blank' => ''
- }
- }
- }
- end
-
- it 'adds secondary params to annotations' do
- is_expected.to eq(
- 'annotations' => {
- 'title' => 'New: Incident',
- 'severity' => described_class::DEFAULT_SEVERITY,
- 'description' => 'Description',
- 'additional.params.1' => 'Some value 1',
- 'additional.params.2' => 'Some value 2'
- },
- 'startsAt' => starts_at.rfc3339
- )
- end
- end
-
- context 'when secondary params hash is too big' do
- before do
- allow(Gitlab::Utils::SafeInlineHash).to receive(:merge_keys!).and_raise(ArgumentError)
- end
-
- it 'catches and re-raises an error' do
- expect { subject }.to raise_error Gitlab::Alerting::NotificationPayloadParser::BadPayloadError, 'The payload is too big'
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/analytics/unique_visits_spec.rb b/spec/lib/gitlab/analytics/unique_visits_spec.rb
index 1432c9ac58f..6ac58e13f4c 100644
--- a/spec/lib/gitlab/analytics/unique_visits_spec.rb
+++ b/spec/lib/gitlab/analytics/unique_visits_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
# Without freezing the time, the test may behave inconsistently
# depending on which day of the week test is run.
reference_time = Time.utc(2020, 6, 1)
- Timecop.freeze(reference_time) { example.run }
+ travel_to(reference_time) { example.run }
end
describe '#track_visit' do
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index 1ac8ebe1369..2ebde145bfd 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -419,10 +419,30 @@ RSpec.describe Gitlab::Auth::AuthFinders do
expect(find_user_from_web_access_token(:ics)).to eq(user)
end
- it 'returns the user for API requests' do
- set_header('SCRIPT_NAME', '/api/endpoint')
+ context 'for API requests' do
+ it 'returns the user' do
+ set_header('SCRIPT_NAME', '/api/endpoint')
+
+ expect(find_user_from_web_access_token(:api)).to eq(user)
+ end
+
+ it 'returns nil if URL does not start with /api/' do
+ set_header('SCRIPT_NAME', '/relative_root/api/endpoint')
+
+ expect(find_user_from_web_access_token(:api)).to be_nil
+ end
- expect(find_user_from_web_access_token(:api)).to eq(user)
+ context 'when relative_url_root is set' do
+ before do
+ stub_config_setting(relative_url_root: '/relative_root')
+ end
+
+ it 'returns the user' do
+ set_header('SCRIPT_NAME', '/relative_root/api/endpoint')
+
+ expect(find_user_from_web_access_token(:api)).to eq(user)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/auth/current_user_mode_spec.rb b/spec/lib/gitlab/auth/current_user_mode_spec.rb
index 60b403780c0..ffd7813190a 100644
--- a/spec/lib/gitlab/auth/current_user_mode_spec.rb
+++ b/spec/lib/gitlab/auth/current_user_mode_spec.rb
@@ -121,7 +121,7 @@ RSpec.describe Gitlab::Auth::CurrentUserMode, :do_not_mock_admin_mode, :request_
subject.enable_admin_mode!(password: user.password)
expect(subject.admin_mode?).to be(true), 'admin mode is not active in the present'
- Timecop.freeze(Gitlab::Auth::CurrentUserMode::MAX_ADMIN_MODE_TIME.from_now) do
+ travel_to(Gitlab::Auth::CurrentUserMode::MAX_ADMIN_MODE_TIME.from_now) do
# in the future this will be a new request, simulate by clearing the RequestStore
Gitlab::SafeRequestStore.clear!
diff --git a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
index a3840e3a22e..85a9c88ebff 100644
--- a/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
+++ b/spec/lib/gitlab/background_migration/merge_request_assignees_migration_progress_check_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Gitlab::BackgroundMigration::MergeRequestAssigneesMigrationProgre
described_class.new.perform
- expect(Feature.enabled?(:multiple_merge_request_assignees)).to eq(true)
+ expect(Feature.enabled?(:multiple_merge_request_assignees, type: :licensed)).to eq(true)
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb b/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb
index db3cbe7ccdc..3cec5cb4c35 100644
--- a/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb
@@ -82,21 +82,4 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateUsersBioToUserDetails, :migra
expect(user_detail).to be_nil
end
-
- context 'when `migrate_bio_to_user_details` feature flag is off' do
- before do
- stub_feature_flags(migrate_bio_to_user_details: false)
- end
-
- it 'does nothing' do
- already_existing_user_details = user_details.where(user_id: [
- user_has_different_details.id,
- user_already_has_details.id
- ])
-
- subject
-
- expect(user_details.all).to match_array(already_existing_user_details)
- end
- end
end
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
index 392b44d1a1f..2dae4a65eeb 100644
--- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
+++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
@@ -74,14 +74,14 @@ RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMent
let(:user_mentions) { merge_request_user_mentions }
let(:resource) { merge_request }
- it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest
+ it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, 'MergeRequest'
context 'when FF disabled' do
before do
stub_feature_flags(migrate_user_mentions: false)
end
- it_behaves_like 'resource migration not run', MigrateMergeRequestMentionsToDb, MergeRequest
+ it_behaves_like 'resource migration not run', MigrateMergeRequestMentionsToDb, 'MergeRequest'
end
end
@@ -103,14 +103,14 @@ RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMent
let(:user_mentions) { commit_user_mentions }
let(:resource) { commit }
- it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, Commit
+ it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, 'Commit'
context 'when FF disabled' do
before do
stub_feature_flags(migrate_user_mentions: false)
end
- it_behaves_like 'resource notes migration not run', MigrateCommitNotesMentionsToDb, Commit
+ it_behaves_like 'resource notes migration not run', MigrateCommitNotesMentionsToDb, 'Commit'
end
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index d4483bf1754..b723c31c4aa 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -312,7 +312,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
# attributes later.
existing_label.reload
- Timecop.freeze(Time.now + 1.minute) do
+ travel_to(Time.now + 1.minute) do
importer.execute
label_after_import = project.labels.find(existing_label.id)
diff --git a/spec/lib/gitlab/checks/matching_merge_request_spec.rb b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
new file mode 100644
index 00000000000..ca7ee784ee3
--- /dev/null
+++ b/spec/lib/gitlab/checks/matching_merge_request_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Checks::MatchingMergeRequest do
+ describe '#match?' do
+ let_it_be(:newrev) { '012345678' }
+ let_it_be(:target_branch) { 'feature' }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:locked_merge_request) do
+ create(:merge_request,
+ :locked,
+ source_project: project,
+ target_project: project,
+ target_branch: target_branch,
+ in_progress_merge_commit_sha: newrev)
+ end
+
+ subject { described_class.new(newrev, target_branch, project) }
+
+ it 'matches a merge request' do
+ expect(subject.match?).to be true
+ end
+
+ it 'does not match any merge request' do
+ matcher = described_class.new(newrev, 'test', project)
+
+ expect(matcher.match?).to be false
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/ansi2json/line_spec.rb b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
index 8b1cd812a70..d681447a0e8 100644
--- a/spec/lib/gitlab/ci/ansi2json/line_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json/line_spec.rb
@@ -58,6 +58,15 @@ RSpec.describe Gitlab::Ci::Ansi2json::Line do
end
end
+ describe '#set_section_options' do
+ it 'sets the current section\'s options' do
+ options = { collapsed: true }
+ subject.set_section_options(options)
+
+ expect(subject.to_h[:section_options]).to eq(options)
+ end
+ end
+
describe '#set_as_section_header' do
it 'change the section_header to true' do
expect { subject.set_as_section_header }
diff --git a/spec/lib/gitlab/ci/ansi2json_spec.rb b/spec/lib/gitlab/ci/ansi2json_spec.rb
index cb6949fddc2..c9c0d1a744e 100644
--- a/spec/lib/gitlab/ci/ansi2json_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
expect(convert_json(trace)).to eq([
{
offset: 0,
- content: [{ text: "section_end:1:2<div>hello</div>" }],
+ content: [{ text: 'section_end:1:2<div>hello</div>' }],
section: 'prepare-script',
section_header: true
},
@@ -329,6 +329,32 @@ RSpec.describe Gitlab::Ci::Ansi2json do
])
end
end
+
+ context 'with section options' do
+ let(:option_section_start) { "section_start:#{section_start_time.to_i}:#{section_name}[collapsed=true,unused_option=123]\r\033[0K"}
+
+ it 'provides section options when set' do
+ trace = "#{option_section_start}hello#{section_end}"
+ expect(convert_json(trace)).to eq([
+ {
+ offset: 0,
+ content: [{ text: 'hello' }],
+ section: 'prepare-script',
+ section_header: true,
+ section_options: {
+ 'collapsed' => 'true',
+ 'unused_option' => '123'
+ }
+ },
+ {
+ offset: 83,
+ content: [],
+ section: 'prepare-script',
+ section_duration: '01:03'
+ }
+ ])
+ end
+ end
end
describe 'incremental updates' do
@@ -339,7 +365,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
context 'with split word' do
let(:pre_text) { "\e[1mHello " }
- let(:text) { "World" }
+ let(:text) { 'World' }
let(:lines) do
[
@@ -355,7 +381,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
context 'with split word on second line' do
let(:pre_text) { "Good\nmorning " }
- let(:text) { "World" }
+ let(:text) { 'World' }
let(:lines) do
[
@@ -514,7 +540,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
end
describe 'trucates' do
- let(:text) { "Hello World" }
+ let(:text) { 'Hello World' }
let(:stream) { StringIO.new(text) }
let(:subject) { described_class.convert(stream) }
@@ -522,11 +548,11 @@ RSpec.describe Gitlab::Ci::Ansi2json do
stream.seek(3, IO::SEEK_SET)
end
- it "returns truncated output" do
+ it 'returns truncated output' do
expect(subject.truncated).to be_truthy
end
- it "does not append output" do
+ it 'does not append output' do
expect(subject.append).to be_falsey
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
index 39697884e3b..3388ae0af2f 100644
--- a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
require_dependency 'active_model'
RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
@@ -46,33 +46,140 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
end
end
- context 'when entry config has only one variable' do
- let(:config) do
- [
- {
- 'VAR_1' => %w[test]
- }
- ]
+ context 'with one_dimensional_matrix feature flag enabled' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: true)
+ matrix.compose!
end
- describe '#valid?' do
- it { is_expected.not_to be_valid }
- end
+ context 'when entry config has only one variable with multiple values' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[build test]
+ }
+ ]
+ end
- describe '#errors' do
- it 'returns error about too many jobs' do
- expect(matrix.errors)
- .to include('variables config requires at least 2 items')
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns no errors' do
+ expect(matrix.errors)
+ .to be_empty
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
+ end
end
+
+ context 'when entry config has only one variable with one value' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[test]
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns no errors' do
+ expect(matrix.errors)
+ .to be_empty
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
+ end
+ end
+ end
+ end
+ end
+
+ context 'with one_dimensional_matrix feature flag disabled' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: false)
+ matrix.compose!
end
- describe '#value' do
- before do
- matrix.compose!
+ context 'when entry config has only one variable with multiple values' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[build test]
+ }
+ ]
end
- it 'returns the value without raising an error' do
- expect(matrix.value).to eq([{ 'VAR_1' => ['test'] }])
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error about requiring at least 2 items' do
+ expect(matrix.errors)
+ .to include('variables config requires at least 2 items')
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[build test] }])
+ end
+ end
+
+ context 'when entry config has only one variable with one value' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[test]
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns an error about requiring at least 2 items' do
+ expect(matrix.errors)
+ .to include('variables config requires at least 2 items')
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => %w[test] }])
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
index 230b001d620..407efb438b5 100644
--- a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+# After the one_dimensional_matrix feature flag is removed, this can be changed back to fast_spec_helper
+require 'spec_helper'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
@@ -45,43 +46,71 @@ RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
end
end
- context 'when entry value is not correct' do
- shared_examples 'invalid variables' do |message|
- describe '#errors' do
- it 'saves errors' do
- expect(entry.errors).to include(message)
- end
+ context 'with one_dimensional_matrix feature flag enabled' do
+ context 'with only one variable' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: true)
end
+ let(:config) { { VAR: 'test' } }
describe '#valid?' do
- it 'is not valid' do
- expect(entry).not_to be_valid
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
end
end
end
+ end
- context 'with array' do
- let(:config) { [:VAR, 'test'] }
+ context 'with one_dimensional_matrix feature flag disabled' do
+ context 'when entry value is not correct' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: false)
+ end
+ shared_examples 'invalid variables' do |message|
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors).to include(message)
+ end
+ end
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
- context 'with empty array' do
- let(:config) { { VAR: 'test', VAR2: [] } }
+ context 'with array' do
+ let(:config) { [:VAR, 'test'] }
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
- context 'with nested array' do
- let(:config) { { VAR: 'test', VAR2: [1, [2]] } }
+ context 'with empty array' do
+ let(:config) { { VAR: 'test', VAR2: [] } }
- it_behaves_like 'invalid variables', /should be a hash of key value pairs/
- end
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
- context 'with only one variable' do
- let(:config) { { VAR: 'test' } }
+ context 'with nested array' do
+ let(:config) { { VAR: 'test', VAR2: [1, [2]] } }
+
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
- it_behaves_like 'invalid variables', /variables config requires at least 2 items/
+ context 'with one_dimensional_matrix feature flag disabled' do
+ context 'with only one variable' do
+ let(:config) { { VAR: 'test' } }
+
+ it_behaves_like 'invalid variables', /variables config requires at least 2 items/
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
index f724825a9cc..dd27b4045c9 100644
--- a/spec/lib/gitlab/ci/cron_parser_spec.rb
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when PST (Pacific Standard Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 1, 1)) do
+ travel_to(Time.utc(2017, 1, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -90,7 +90,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when PDT (Pacific Daylight Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 6, 1)) do
+ travel_to(Time.utc(2017, 6, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when CET (Central European Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 1, 1)) do
+ travel_to(Time.utc(2017, 1, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -125,7 +125,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when CEST (Central European Summer Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 6, 1)) do
+ travel_to(Time.utc(2017, 6, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -152,7 +152,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when EST (Eastern Standard Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 1, 1)) do
+ travel_to(Time.utc(2017, 1, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -160,7 +160,7 @@ RSpec.describe Gitlab::Ci::CronParser do
context 'when EDT (Eastern Daylight Time)' do
it 'converts time in server time zone' do
- Timecop.freeze(Time.utc(2017, 6, 1)) do
+ travel_to(Time.utc(2017, 6, 1)) do
expect(subject.hour).to eq(hour_in_utc)
end
end
@@ -174,7 +174,7 @@ RSpec.describe Gitlab::Ci::CronParser do
# (e.g. America/Chicago) at the start of the test. Stubbing
# TZ doesn't appear to be enough.
it 'generates day without TZInfo::AmbiguousTime error' do
- Timecop.freeze(Time.utc(2020, 1, 1)) do
+ travel_to(Time.utc(2020, 1, 1)) do
expect(subject.year).to eq(year)
expect(subject.month).to eq(12)
expect(subject.day).to eq(1)
diff --git a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
index 1f497dea2bf..5a3a9b53da6 100644
--- a/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/test/junit_spec.rb
@@ -4,7 +4,7 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Parsers::Test::Junit do
describe '#parse!' do
- subject { described_class.new.parse!(junit, test_suite, args) }
+ subject { described_class.new.parse!(junit, test_suite, **args) }
let(:test_suite) { Gitlab::Ci::Reports::TestSuite.new('rspec') }
let(:test_cases) { flattened_test_cases(test_suite) }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 34df0e86a18..0b961336f3f 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
- let(:project) { create(:project, :repository) }
- let(:head_sha) { project.repository.head_commit.id }
- let(:pipeline) { create(:ci_empty_pipeline, project: project, sha: head_sha) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:head_sha) { project.repository.head_commit.id }
+ let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
@@ -503,7 +503,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
using RSpec::Parameterized
let(:pipeline) do
- build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source)
+ build(:ci_empty_pipeline, ref: 'deploy', tag: false, source: source, project: project)
end
context 'matches' do
@@ -766,7 +766,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
context 'with a matching changes: rule' do
let(:pipeline) do
- create(:ci_pipeline, project: project).tap do |pipeline|
+ build(:ci_pipeline, project: project).tap do |pipeline|
stub_pipeline_modified_paths(pipeline, %w[app/models/ci/pipeline.rb spec/models/ci/pipeline_spec.rb .gitlab-ci.yml])
end
end
diff --git a/spec/lib/gitlab/ci/runner/backoff_spec.rb b/spec/lib/gitlab/ci/runner/backoff_spec.rb
new file mode 100644
index 00000000000..f147d69f7cd
--- /dev/null
+++ b/spec/lib/gitlab/ci/runner/backoff_spec.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+require 'active_support/testing/time_helpers'
+
+RSpec.describe Gitlab::Ci::Runner::Backoff do
+ include ActiveSupport::Testing::TimeHelpers
+
+ describe '#duration' do
+ it 'returns backoff duration from start' do
+ freeze_time do
+ described_class.new(5.minutes.ago).then do |backoff|
+ expect(backoff.duration).to eq 5.minutes
+ end
+ end
+ end
+
+ it 'returns an integer value' do
+ freeze_time do
+ described_class.new(5.seconds.ago).then do |backoff|
+ expect(backoff.duration).to be 5
+ end
+ end
+ end
+
+ it 'returns the smallest number greater than or equal to duration' do
+ freeze_time do
+ described_class.new(0.5.seconds.ago).then do |backoff|
+ expect(backoff.duration).to be 1
+ end
+ end
+ end
+ end
+
+ describe '#slot' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:started, :slot) do
+ 0 | 0
+ 0.1 | 0
+ 0.9 | 0
+ 1 | 0
+ 1.1 | 0
+ 1.9 | 0
+ 2 | 0
+ 2.9 | 0
+ 3 | 0
+ 4 | 1
+ 5 | 1
+ 6 | 1
+ 7 | 1
+ 8 | 2
+ 9 | 2
+ 9.9 | 2
+ 10 | 2
+ 15 | 2
+ 16 | 3
+ 31 | 3
+ 32 | 4
+ 63 | 4
+ 64 | 5
+ 127 | 5
+ 128 | 6
+ 250 | 6
+ 310 | 7
+ 520 | 8
+ 999 | 8
+ end
+
+ with_them do
+ it 'falls into an appropriate backoff slot' do
+ freeze_time do
+ backoff = described_class.new(started.seconds.ago)
+ expect(backoff.slot).to eq slot
+ end
+ end
+ end
+ end
+
+ describe '#to_seconds' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:started, :backoff) do
+ 0 | 1
+ 0.1 | 1
+ 0.9 | 1
+ 1 | 1
+ 1.1 | 1
+ 1.9 | 1
+ 2 | 1
+ 3 | 1
+ 4 | 2
+ 5 | 2
+ 6 | 2
+ 6.5 | 2
+ 7 | 2
+ 8 | 4
+ 9 | 4
+ 9.9 | 4
+ 10 | 4
+ 15 | 4
+ 16 | 8
+ 31 | 8
+ 32 | 16
+ 63 | 16
+ 64 | 32
+ 127 | 32
+ 128 | 64
+ 250 | 64
+ 310 | 64
+ 520 | 64
+ 999 | 64
+ end
+
+ with_them do
+ it 'calculates backoff based on an appropriate slot' do
+ freeze_time do
+ described_class.new(started.seconds.ago).then do |delay|
+ expect(delay.to_seconds).to eq backoff
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/checksum_spec.rb b/spec/lib/gitlab/ci/trace/checksum_spec.rb
new file mode 100644
index 00000000000..794794c3f69
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/checksum_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::Checksum do
+ let(:build) { create(:ci_build, :running) }
+
+ subject { described_class.new(build) }
+
+ context 'when build pending state exists' do
+ before do
+ create(:ci_build_pending_state, build: build, trace_checksum: 'crc32:d4777540')
+ end
+
+ context 'when matching persisted trace chunks exist' do
+ before do
+ create_chunk(index: 0, data: 'a' * 128.kilobytes)
+ create_chunk(index: 1, data: 'b' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'calculates combined trace chunks CRC32 correctly' do
+ expect(subject.chunks_crc32).to eq 3564598592
+ expect(subject).to be_valid
+ end
+ end
+
+ context 'when trace chunks were persisted in a wrong order' do
+ before do
+ create_chunk(index: 0, data: 'b' * 128.kilobytes)
+ create_chunk(index: 1, data: 'a' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'makes trace checksum invalid' do
+ expect(subject).not_to be_valid
+ end
+ end
+
+ context 'when one of the trace chunks is missing' do
+ before do
+ create_chunk(index: 0, data: 'a' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'makes trace checksum invalid' do
+ expect(subject).not_to be_valid
+ end
+ end
+
+ context 'when checksums of persisted trace chunks do not match' do
+ before do
+ create_chunk(index: 0, data: 'a' * 128.kilobytes)
+ create_chunk(index: 1, data: 'X' * 128.kilobytes)
+ create_chunk(index: 2, data: 'ccccccccccccccccc')
+ end
+
+ it 'makes trace checksum invalid' do
+ expect(subject).not_to be_valid
+ end
+ end
+
+ context 'when persisted trace chunks are missing' do
+ it 'makes trace checksum invalid' do
+ expect(subject.state_crc32).to eq 3564598592
+ expect(subject).not_to be_valid
+ end
+ end
+ end
+
+ context 'when build pending state is missing' do
+ describe '#state_crc32' do
+ it 'returns nil' do
+ expect(subject.state_crc32).to be_nil
+ end
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ describe '#trace_chunks' do
+ before do
+ create_chunk(index: 0, data: 'abcdefg')
+ end
+
+ it 'does not load raw_data from a database store' do
+ subject.trace_chunks.first.then do |chunk|
+ expect(chunk).to be_database
+ expect { chunk.raw_data }
+ .to raise_error ActiveModel::MissingAttributeError
+ end
+ end
+ end
+
+ describe '#last_chunk' do
+ context 'when there are no chunks' do
+ it 'returns nil' do
+ expect(subject.last_chunk).to be_nil
+ end
+ end
+
+ context 'when there are multiple chunks' do
+ before do
+ create_chunk(index: 1, data: '1234')
+ create_chunk(index: 0, data: 'abcd')
+ end
+
+ it 'returns chunk with the highest index' do
+ expect(subject.last_chunk.chunk_index).to eq 1
+ end
+ end
+ end
+
+ def create_chunk(index:, data:)
+ create(:ci_build_trace_chunk, :persisted, build: build,
+ chunk_index: index,
+ initial_data: data)
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace/metrics_spec.rb b/spec/lib/gitlab/ci/trace/metrics_spec.rb
new file mode 100644
index 00000000000..6518d0ab075
--- /dev/null
+++ b/spec/lib/gitlab/ci/trace/metrics_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Trace::Metrics, :prometheus do
+ describe '#increment_trace_bytes' do
+ context 'when incrementing by more than one' do
+ it 'increments a single counter' do
+ subject.increment_trace_bytes(10)
+ subject.increment_trace_bytes(20)
+ subject.increment_trace_bytes(30)
+
+ expect(described_class.trace_bytes.get).to eq 60
+ expect(described_class.trace_bytes.values.count).to eq 1
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 171877dbaee..f037e803fb4 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -111,4 +111,13 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state do
end
end
end
+
+ describe '#lock' do
+ it 'acquires an exclusive lease on the trace' do
+ trace.lock do
+ expect { trace.lock }
+ .to raise_error described_class::LockedError
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
new file mode 100644
index 00000000000..7e3cd7ec254
--- /dev/null
+++ b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Gitlab
+ module Ci
+ class YamlProcessor
+ RSpec.describe Result do
+ include StubRequests
+
+ let(:user) { create(:user) }
+ let(:ci_config) { Gitlab::Ci::Config.new(config_content, user: user) }
+ let(:result) { described_class.new(ci_config: ci_config, warnings: ci_config&.warnings) }
+
+ describe '#merged_yaml' do
+ subject(:merged_yaml) { result.merged_yaml }
+
+ let(:config_content) do
+ YAML.dump(
+ include: { remote: 'https://example.com/sample.yml' },
+ test: { stage: 'test', script: 'echo' }
+ )
+ end
+
+ let(:included_yml) do
+ YAML.dump(
+ another_test: { stage: 'test', script: 'echo 2' }
+ )
+ end
+
+ before do
+ stub_full_request('https://example.com/sample.yml').to_return(body: included_yml)
+ end
+
+ it 'returns expanded yaml config' do
+ expanded_config = YAML.safe_load(merged_yaml, [Symbol])
+ included_config = YAML.safe_load(included_yml, [Symbol])
+
+ expect(expanded_config).to include(*included_config.keys)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
index efdfc0a980b..6b568320953 100644
--- a/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
+++ b/spec/lib/gitlab/cleanup/orphan_lfs_file_references_spec.rb
@@ -42,12 +42,24 @@ RSpec.describe Gitlab::Cleanup::OrphanLfsFileReferences do
expect(null_logger).to receive(:info).with("Looking for orphan LFS files for project #{project.name_with_namespace}")
expect(null_logger).to receive(:info).with("Removed invalid references: 1")
expect(ProjectCacheWorker).to receive(:perform_async).with(project.id, [], [:lfs_objects_size])
+ expect(service).to receive(:remove_orphan_references).and_call_original
expect { service.run! }.to change { project.lfs_objects.count }.from(2).to(1)
expect(LfsObjectsProject.exists?(invalid_reference.id)).to be_falsey
end
+ it 'does nothing if the project has no LFS objects' do
+ expect(null_logger).to receive(:info).with(/Looking for orphan LFS files/)
+ expect(null_logger).to receive(:info).with(/Nothing to do/)
+
+ project.lfs_objects_projects.delete_all
+
+ expect(service).not_to receive(:remove_orphan_references)
+
+ service.run!
+ end
+
context 'LFS object is in design repository' do
before do
expect(project.design_repository).to receive(:exists?).and_return(true)
diff --git a/spec/lib/gitlab/code_navigation_path_spec.rb b/spec/lib/gitlab/code_navigation_path_spec.rb
index 4dc864b158d..206541f7c0d 100644
--- a/spec/lib/gitlab/code_navigation_path_spec.rb
+++ b/spec/lib/gitlab/code_navigation_path_spec.rb
@@ -16,10 +16,6 @@ RSpec.describe Gitlab::CodeNavigationPath do
subject { described_class.new(project, commit_sha).full_json_path_for(path) }
- before do
- stub_feature_flags(code_navigation: project)
- end
-
context 'when a pipeline exist for a sha' do
it 'returns path to a file in the artifact' do
expect(subject).to eq(lsif_path)
@@ -41,15 +37,5 @@ RSpec.describe Gitlab::CodeNavigationPath do
expect(subject).to eq(lsif_path)
end
end
-
- context 'when code_navigation feature is disabled' do
- before do
- stub_feature_flags(code_navigation: false)
- end
-
- it 'returns nil' do
- expect(subject).to be_nil
- end
- end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/events_spec.rb b/spec/lib/gitlab/cycle_analytics/events_spec.rb
index e0a8e2c17a3..a31f34d82d7 100644
--- a/spec/lib/gitlab/cycle_analytics/events_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/events_spec.rb
@@ -2,16 +2,20 @@
require 'spec_helper'
-RSpec.describe 'cycle analytics events' do
- let(:project) { create(:project, :repository) }
+RSpec.describe 'cycle analytics events', :aggregate_failures do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, :admin) }
let(:from_date) { 10.days.ago }
- let(:user) { create(:user, :admin) }
let!(:context) { create(:issue, project: project, created_at: 2.days.ago) }
let(:events) do
- CycleAnalytics::ProjectLevel.new(project, options: { from: from_date, current_user: user })[stage].events
+ CycleAnalytics::ProjectLevel
+ .new(project, options: { from: from_date, current_user: user })[stage]
+ .events
end
+ let(:event) { events.first }
+
before do
setup(context)
end
@@ -19,36 +23,15 @@ RSpec.describe 'cycle analytics events' do
describe '#issue_events' do
let(:stage) { :issue }
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq(context.title)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(context.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq(context.title)
+ expect(event[:url]).not_to be_nil
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:created_at]).to end_with('ago')
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(context.author.name)
end
end
@@ -59,36 +42,15 @@ RSpec.describe 'cycle analytics events' do
create_commit_referencing_issue(context)
end
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq(context.title)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(context.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq(context.title)
+ expect(event[:url]).not_to be_nil
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:created_at]).to end_with('ago')
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(context.author.name)
end
end
@@ -100,32 +62,14 @@ RSpec.describe 'cycle analytics events' do
create_commit_referencing_issue(context)
end
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq('Awesome merge_request')
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).to end_with('ago')
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq('Awesome merge_request')
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:created_at]).to end_with('ago')
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
@@ -152,40 +96,16 @@ RSpec.describe 'cycle analytics events' do
merge_merge_requests_closing_issue(user, project, context)
end
- it 'has the name' do
- expect(events.first[:name]).not_to be_nil
- end
-
- it 'has the ID' do
- expect(events.first[:id]).not_to be_nil
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has the branch name' do
- expect(events.first[:branch]).not_to be_nil
- end
-
- it 'has the branch URL' do
- expect(events.first[:branch][:url]).not_to be_nil
- end
-
- it 'has the short SHA' do
- expect(events.first[:short_sha]).not_to be_nil
- end
-
- it 'has the commit URL' do
- expect(events.first[:commit_url]).not_to be_nil
- end
-
- it 'has the date' do
- expect(events.first[:date]).not_to be_nil
- end
-
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
+ it 'has correct attributes' do
+ expect(event[:name]).not_to be_nil
+ expect(event[:id]).not_to be_nil
+ expect(event[:url]).not_to be_nil
+ expect(event[:branch]).not_to be_nil
+ expect(event[:branch][:url]).not_to be_nil
+ expect(event[:short_sha]).not_to be_nil
+ expect(event[:commit_url]).not_to be_nil
+ expect(event[:date]).not_to be_nil
+ expect(event[:total_time]).not_to be_empty
end
end
@@ -197,40 +117,16 @@ RSpec.describe 'cycle analytics events' do
merge_merge_requests_closing_issue(user, project, context)
end
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it 'has a title' do
- expect(events.first[:title]).to eq('Awesome merge_request')
- end
-
- it 'has an iid' do
- expect(events.first[:iid]).to eq(context.iid.to_s)
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has a state' do
- expect(events.first[:state]).not_to be_nil
- end
-
- it 'has a created_at timestamp' do
- expect(events.first[:created_at]).not_to be_nil
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
+ it 'has correct attributes' do
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:title]).to eq('Awesome merge_request')
+ expect(event[:iid]).to eq(context.iid.to_s)
+ expect(event[:url]).not_to be_nil
+ expect(event[:state]).not_to be_nil
+ expect(event[:created_at]).not_to be_nil
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
@@ -257,58 +153,25 @@ RSpec.describe 'cycle analytics events' do
deploy_master(user, project)
end
- it 'has the name' do
- expect(events.first[:name]).not_to be_nil
- end
-
- it 'has the ID' do
- expect(events.first[:id]).not_to be_nil
- end
-
- it 'has the URL' do
- expect(events.first[:url]).not_to be_nil
- end
-
- it 'has the branch name' do
- expect(events.first[:branch]).not_to be_nil
- end
-
- it 'has the branch URL' do
- expect(events.first[:branch][:url]).not_to be_nil
- end
-
- it 'has the short SHA' do
- expect(events.first[:short_sha]).not_to be_nil
- end
-
- it 'has the commit URL' do
- expect(events.first[:commit_url]).not_to be_nil
- end
-
- it 'has the date' do
- expect(events.first[:date]).not_to be_nil
- end
-
- it 'has the total time' do
- expect(events.first[:total_time]).not_to be_empty
- end
-
- it "has the author's URL" do
- expect(events.first[:author][:web_url]).not_to be_nil
- end
-
- it "has the author's avatar URL" do
- expect(events.first[:author][:avatar_url]).not_to be_nil
- end
-
- it "has the author's name" do
- expect(events.first[:author][:name]).to eq(MergeRequest.first.author.name)
+ it 'has correct attributes' do
+ expect(event[:name]).not_to be_nil
+ expect(event[:id]).not_to be_nil
+ expect(event[:url]).not_to be_nil
+ expect(event[:branch]).not_to be_nil
+ expect(event[:branch][:url]).not_to be_nil
+ expect(event[:short_sha]).not_to be_nil
+ expect(event[:commit_url]).not_to be_nil
+ expect(event[:date]).not_to be_nil
+ expect(event[:total_time]).not_to be_empty
+ expect(event[:author][:web_url]).not_to be_nil
+ expect(event[:author][:avatar_url]).not_to be_nil
+ expect(event[:author][:name]).to eq(MergeRequest.first.author.name)
end
end
def setup(context)
milestone = create(:milestone, project: project)
- context.update(milestone: milestone)
+ context.update!(milestone: milestone)
mr = create_merge_request_closing_issue(user, project, context, commit_message: "References #{context.to_reference}")
ProcessCommitWorker.new.perform(project.id, user.id, mr.commits.last.to_hash)
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/lib/gitlab/danger/commit_linter_spec.rb
index c31522c538d..882cede759b 100644
--- a/spec/lib/gitlab/danger/commit_linter_spec.rb
+++ b/spec/lib/gitlab/danger/commit_linter_spec.rb
@@ -323,6 +323,16 @@ RSpec.describe Gitlab::Danger::CommitLinter do
end
end
+ context 'when message includes a value that is surrounded by backticks' do
+ let(:commit_message) { "A commit message `%20`" }
+
+ it 'does not add a problem' do
+ expect(commit_linter).not_to receive(:add_problem)
+
+ commit_linter.lint
+ end
+ end
+
context 'when message includes a short reference' do
[
'A commit message to fix #1234',
@@ -336,7 +346,9 @@ RSpec.describe Gitlab::Danger::CommitLinter do
'A commit message to fix gitlab-org/gitlab#1234',
'A commit message to fix gitlab-org/gitlab!1234',
'A commit message to fix gitlab-org/gitlab&1234',
- 'A commit message to fix gitlab-org/gitlab%1234'
+ 'A commit message to fix gitlab-org/gitlab%1234',
+ 'A commit message to fix "gitlab-org/gitlab%1234"',
+ 'A commit message to fix `gitlab-org/gitlab%1234'
].each do |message|
let(:commit_message) { message }
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index c7d55c396ef..708e9a13aed 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -435,6 +435,28 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
+ describe '#draft_mr?' do
+ it 'returns false when `gitlab_helper` is unavailable' do
+ expect(helper).to receive(:gitlab_helper).and_return(nil)
+
+ expect(helper).not_to be_draft_mr
+ end
+
+ it 'returns true for a draft MR' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'Draft: My MR title')
+
+ expect(helper).to be_draft_mr
+ end
+
+ it 'returns false for a non-draft MR' do
+ expect(fake_gitlab).to receive(:mr_json)
+ .and_return('title' => 'My MR title')
+
+ expect(helper).not_to be_draft_mr
+ end
+ end
+
describe '#cherry_pick_mr?' do
it 'returns false when `gitlab_helper` is unavailable' do
expect(helper).to receive(:gitlab_helper).and_return(nil)
diff --git a/spec/lib/gitlab/danger/roulette_spec.rb b/spec/lib/gitlab/danger/roulette_spec.rb
index b471e17e2e7..9acaa57ee10 100644
--- a/spec/lib/gitlab/danger/roulette_spec.rb
+++ b/spec/lib/gitlab/danger/roulette_spec.rb
@@ -7,7 +7,7 @@ require 'gitlab/danger/roulette'
RSpec.describe Gitlab::Danger::Roulette do
around do |example|
- Timecop.freeze(Time.utc(2020, 06, 22, 10)) { example.run }
+ travel_to(Time.utc(2020, 06, 22, 10)) { example.run }
end
let(:backend_available) { true }
@@ -67,14 +67,18 @@ RSpec.describe Gitlab::Danger::Roulette do
)
end
- let(:teammate_json) do
+ let(:teammates) do
[
backend_maintainer.to_h,
frontend_maintainer.to_h,
frontend_reviewer.to_h,
software_engineer_in_test.to_h,
engineering_productivity_reviewer.to_h
- ].to_json
+ ]
+ end
+
+ let(:teammate_json) do
+ teammates.to_json
end
subject(:roulette) { Object.new.extend(described_class) }
@@ -210,6 +214,69 @@ RSpec.describe Gitlab::Danger::Roulette do
end
end
end
+
+ describe 'reviewer suggestion probability' do
+ let(:reviewer) { teammate_with_capability('reviewer', 'reviewer backend') }
+ let(:hungry_reviewer) { teammate_with_capability('hungry_reviewer', 'reviewer backend', hungry: true) }
+ let(:traintainer) { teammate_with_capability('traintainer', 'trainee_maintainer backend') }
+ let(:hungry_traintainer) { teammate_with_capability('hungry_traintainer', 'trainee_maintainer backend', hungry: true) }
+ let(:teammates) do
+ [
+ reviewer.to_h,
+ hungry_reviewer.to_h,
+ traintainer.to_h,
+ hungry_traintainer.to_h
+ ]
+ end
+
+ let(:categories) { [:backend] }
+
+ # This test exercises probability with inherent randomness.
+ # The variance is inversely related to the sample size.
+ # Given a large enough sample size, the variance would be smaller,
+ # but the test would take longer.
+ # Given a smaller sample size, the variance would be larger,
+ # but the test would take less time.
+ let!(:sample_size) { 500 }
+ let!(:variance) { 0.1 }
+
+ before do
+ # This test needs actual randomness to simulate probabilities
+ allow(subject).to receive(:new_random).and_return(Random.new)
+ WebMock
+ .stub_request(:get, described_class::ROULETTE_DATA_URL)
+ .to_return(body: teammate_json)
+ end
+
+ it 'has 1:2:3:4 probability of picking reviewer, hungry_reviewer, traintainer, hungry_traintainer' do
+ picks = Array.new(sample_size).map do
+ spins = subject.spin(project, categories, timezone_experiment: timezone_experiment)
+ spins.first.reviewer.name
+ end
+
+ expect(probability(picks, 'reviewer')).to be_within(variance).of(0.1)
+ expect(probability(picks, 'hungry_reviewer')).to be_within(variance).of(0.2)
+ expect(probability(picks, 'traintainer')).to be_within(variance).of(0.3)
+ expect(probability(picks, 'hungry_traintainer')).to be_within(variance).of(0.4)
+ end
+
+ def probability(picks, role)
+ picks.count(role).to_f / picks.length
+ end
+
+ def teammate_with_capability(name, capability, hungry: false)
+ Gitlab::Danger::Teammate.new(
+ {
+ 'name' => name,
+ 'projects' => {
+ 'gitlab' => capability
+ },
+ 'available' => true,
+ 'hungry' => hungry
+ }
+ )
+ end
+ end
end
RSpec::Matchers.define :match_teammates do |expected|
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index 6fd32493d6b..5a47d74a7f3 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe Gitlab::Danger::Teammate do
describe '#local_hour' do
around do |example|
- Timecop.freeze(Time.utc(2020, 6, 23, 10)) { example.run }
+ travel_to(Time.utc(2020, 6, 23, 10)) { example.run }
end
context 'when author is given' do
diff --git a/spec/lib/gitlab/database/background_migration_job_spec.rb b/spec/lib/gitlab/database/background_migration_job_spec.rb
index dd5bf8b512f..42695925a1c 100644
--- a/spec/lib/gitlab/database/background_migration_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration_job_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Gitlab::Database::BackgroundMigrationJob do
let!(:job1) { create(:background_migration_job, :succeeded, created_at: initial_time, updated_at: initial_time) }
it 'does not update non-pending jobs' do
- Timecop.freeze(initial_time + 1.day) do
+ travel_to(initial_time + 1.day) do
expect { described_class.mark_all_as_succeeded('TestJob', [1, 100]) }
.to change { described_class.succeeded.count }.from(1).to(2)
end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 71d3666602f..93c499eb56d 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -108,6 +108,24 @@ RSpec.describe Gitlab::Database::BatchCount do
expect { described_class.batch_count(model.distinct(column)) }.to raise_error 'Use distinct count for optimized distinct counting'
end
end
+
+ context 'when a relation is grouped' do
+ let!(:one_more_issue) { create(:issue, author: user, project: model.first.project) }
+
+ before do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 1)
+ end
+
+ context 'count by default column' do
+ let(:count) do
+ described_class.batch_count(model.group(column), batch_size: 2)
+ end
+
+ it 'counts grouped records' do
+ expect(count).to eq({ user.id => 4, another_user.id => 2 })
+ end
+ end
+ end
end
describe '#batch_distinct_count' do
@@ -175,6 +193,24 @@ RSpec.describe Gitlab::Database::BatchCount do
end.to raise_error 'Use distinct count only with non id fields'
end
end
+
+ context 'when a relation is grouped' do
+ let!(:one_more_issue) { create(:issue, author: user, project: model.first.project) }
+
+ before do
+ stub_const('Gitlab::Database::BatchCounter::MIN_REQUIRED_BATCH_SIZE', 1)
+ end
+
+ context 'distinct count by non-unique column' do
+ let(:count) do
+ described_class.batch_distinct_count(model.group(column), :project_id, batch_size: 2)
+ end
+
+ it 'counts grouped records' do
+ expect(count).to eq({ user.id => 3, another_user.id => 2 })
+ end
+ end
+ end
end
describe '#batch_sum' do
diff --git a/spec/lib/gitlab/database/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/concurrent_reindex_spec.rb
deleted file mode 100644
index 4e2c3f547d4..00000000000
--- a/spec/lib/gitlab/database/concurrent_reindex_spec.rb
+++ /dev/null
@@ -1,207 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::ConcurrentReindex, '#execute' do
- subject { described_class.new(index_name, logger: logger) }
-
- let(:table_name) { '_test_reindex_table' }
- let(:column_name) { '_test_column' }
- let(:index_name) { '_test_reindex_index' }
- let(:logger) { double('logger', debug: nil, info: nil, error: nil ) }
- let(:connection) { ActiveRecord::Base.connection }
-
- before do
- connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
- id serial NOT NULL PRIMARY KEY,
- #{column_name} integer NOT NULL);
-
- CREATE INDEX #{index_name} ON #{table_name} (#{column_name});
- SQL
- end
-
- context 'when the index does not exist' do
- before do
- connection.execute(<<~SQL)
- DROP INDEX #{index_name}
- SQL
- end
-
- it 'raises an error' do
- expect { subject.execute }.to raise_error(described_class::ReindexError, /does not exist/)
- end
- end
-
- context 'when the index is unique' do
- before do
- connection.execute(<<~SQL)
- DROP INDEX #{index_name};
- CREATE UNIQUE INDEX #{index_name} ON #{table_name} (#{column_name})
- SQL
- end
-
- it 'raises an error' do
- expect do
- subject.execute
- end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
- end
- end
-
- context 'replacing the original index with a rebuilt copy' do
- let(:replacement_name) { 'tmp_reindex__test_reindex_index' }
- let(:replaced_name) { 'old_reindex__test_reindex_index' }
-
- let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
- let(:drop_index) { "DROP INDEX CONCURRENTLY IF EXISTS #{replacement_name}" }
-
- let!(:original_index) { find_index_create_statement }
-
- before do
- allow(subject).to receive(:connection).and_return(connection)
- allow(subject).to receive(:disable_statement_timeout).and_yield
- end
-
- it 'replaces the existing index with an identical index' do
- expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
-
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
- expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.execute
-
- check_index_exists
- end
-
- context 'when a dangling index is left from a previous run' do
- before do
- connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
- end
-
- it 'replaces the existing index with an identical index' do
- expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
-
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect_to_execute_in_order("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- expect_to_execute_in_order("ALTER INDEX #{replacement_name} RENAME TO #{index_name}")
- expect_to_execute_in_order("ALTER INDEX #{replaced_name} RENAME TO #{replacement_name}")
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- subject.execute
-
- check_index_exists
- end
- end
-
- context 'when it fails to create the replacement index' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect(connection).to receive(:execute).with(create_index).ordered
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when the replacement index is not valid' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect(subject).to receive(:replacement_index_valid?).and_return(false)
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
-
- check_index_exists
- end
- end
-
- context 'when a database error occurs while swapping the indexes' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
- end
-
- expect(connection).to receive(:execute).ordered
- .with("ALTER INDEX #{index_name} RENAME TO #{replaced_name}")
- .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /connect timeout/)
-
- check_index_exists
- end
- end
-
- context 'when with_lock_retries fails to acquire the lock' do
- it 'safely cleans up and signals the error' do
- expect_to_execute_concurrently_in_order(drop_index)
- expect_to_execute_concurrently_in_order(create_index)
-
- expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
- expect(instance).to receive(:run).with(raise_on_exhaustion: true)
- .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
- end
-
- expect_to_execute_concurrently_in_order(drop_index)
-
- expect { subject.execute }.to raise_error(described_class::ReindexError, /exhausted/)
-
- check_index_exists
- end
- end
- end
-
- def expect_to_execute_concurrently_in_order(sql)
- # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
- # verify the original call but pass through the non-concurrent form.
- expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
- end
- end
-
- def expect_to_execute_in_order(sql)
- expect(connection).to receive(:execute).with(sql).ordered.and_call_original
- end
-
- def find_index_create_statement
- ActiveRecord::Base.connection.select_value(<<~SQL)
- SELECT indexdef
- FROM pg_indexes
- WHERE schemaname = 'public'
- AND indexname = #{ActiveRecord::Base.connection.quote(index_name)}
- SQL
- end
-
- def check_index_exists
- expect(find_index_create_statement).to eq(original_index)
- end
-end
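The removed helper above intercepts `connection.execute` with RSpec's `and_wrap_original` so that `CREATE INDEX CONCURRENTLY` statements can still run inside the transaction that wraps each example. A minimal sketch of that pattern, with illustrative names only:

require 'spec_helper'

# Sketch only: intercept execute and rewrite the SQL before passing it through.
# CREATE/DROP INDEX CONCURRENTLY is not allowed inside a transaction, so the
# example runs the non-concurrent form while wrapped in a test transaction.
allow(connection).to receive(:execute).and_wrap_original do |original, sql|
  original.call(sql.sub(/CONCURRENTLY/, ''))
end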
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 0bdcca630aa..727ad243349 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -1128,7 +1128,65 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
name: 'index_on_issues_gl_project_id',
length: [],
order: [],
- opclasses: { 'gl_project_id' => 'bar' })
+ opclass: { 'gl_project_id' => 'bar' })
+
+ model.copy_indexes(:issues, :project_id, :gl_project_id)
+ end
+ end
+
+ context 'using an index with multiple columns and custom operator classes' do
+ it 'copies the index' do
+ index = double(:index,
+ columns: %w(project_id foobar),
+ name: 'index_on_issues_project_id_foobar',
+ using: :gin,
+ where: nil,
+ opclasses: { 'project_id' => 'bar', 'foobar' => :gin_trgm_ops },
+ unique: false,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:issues, 'project_id')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:issues,
+ %w(gl_project_id foobar),
+ unique: false,
+ name: 'index_on_issues_gl_project_id_foobar',
+ length: [],
+ order: [],
+ opclass: { 'gl_project_id' => 'bar', 'foobar' => :gin_trgm_ops },
+ using: :gin)
+
+ model.copy_indexes(:issues, :project_id, :gl_project_id)
+ end
+ end
+
+ context 'using an index with multiple columns and a custom operator class on the non-affected column' do
+ it 'copies the index' do
+ index = double(:index,
+ columns: %w(project_id foobar),
+ name: 'index_on_issues_project_id_foobar',
+ using: :gin,
+ where: nil,
+ opclasses: { 'foobar' => :gin_trgm_ops },
+ unique: false,
+ lengths: [],
+ orders: [])
+
+ allow(model).to receive(:indexes_for).with(:issues, 'project_id')
+ .and_return([index])
+
+ expect(model).to receive(:add_concurrent_index)
+ .with(:issues,
+ %w(gl_project_id foobar),
+ unique: false,
+ name: 'index_on_issues_gl_project_id_foobar',
+ length: [],
+ order: [],
+ opclass: { 'foobar' => :gin_trgm_ops },
+ using: :gin)
model.copy_indexes(:issues, :project_id, :gl_project_id)
end
@@ -1400,15 +1458,32 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
)
end
- after do
- 'DROP INDEX IF EXISTS test_index;'
- end
-
it 'returns true if an index exists' do
expect(model.index_exists_by_name?(:projects, 'test_index'))
.to be_truthy
end
end
+
+ context 'when an index exists for a table with the same name in another schema' do
+ before do
+ ActiveRecord::Base.connection.execute(
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE INDEX test_index_on_name ON new_test_schema.projects (LOWER(name));'
+ )
+ end
+
+ it 'returns false if the index does not exist in the current schema' do
+ expect(model.index_exists_by_name?(:projects, 'test_index_on_name'))
+ .to be_falsy
+ end
+ end
end
describe '#create_or_update_plan_limit' do
@@ -1863,11 +1938,17 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
ActiveRecord::Base.connection.execute(
'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
)
- end
- after do
ActiveRecord::Base.connection.execute(
- 'ALTER TABLE projects DROP CONSTRAINT IF EXISTS check_1'
+ 'CREATE SCHEMA new_test_schema'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'CREATE TABLE new_test_schema.projects (id integer, name character varying)'
+ )
+
+ ActiveRecord::Base.connection.execute(
+ 'ALTER TABLE new_test_schema.projects ADD CONSTRAINT check_2 CHECK (char_length(name) <= 5)'
)
end
@@ -1885,6 +1966,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model.check_constraint_exists?(:users, 'check_1'))
.to be_falsy
end
+
+ it 'returns false if a constraint with the same name exists for the same table in another schema' do
+ expect(model.check_constraint_exists?(:projects, 'check_2'))
+ .to be_falsy
+ end
end
describe '#add_check_constraint' do
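The new contexts above pin down that `index_exists_by_name?` and `check_constraint_exists?` only consider the current schema, even when an identically named object exists elsewhere. A hedged sketch of how such a schema-scoped lookup can be expressed; the helper below is illustrative and not the migration helper's actual implementation:

# Sketch: restrict the lookup to the connection's current schema so that
# identically named indexes in other schemas do not produce false positives.
def index_exists_in_current_schema?(connection, table, index)
  connection.select_value(<<~SQL).present?
    SELECT 1
    FROM pg_indexes
    WHERE schemaname = current_schema()
      AND tablename = #{connection.quote(table)}
      AND indexname = #{connection.quote(index)}
  SQL
end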
diff --git a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
index 034bf966db7..8a35d8149ad 100644
--- a/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
+++ b/spec/lib/gitlab/database/obsolete_ignored_columns_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::Database::ObsoleteIgnoredColumns do
describe '#execute' do
it 'returns a list of class names and columns pairs' do
- Timecop.freeze(REMOVE_DATE) do
+ travel_to(REMOVE_DATE) do
expect(subject.execute).to eq([
['Testing::A', {
'unused' => IgnorableColumns::ColumnIgnore.new(Date.parse('2019-01-01'), '12.0'),
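Several hunks in this diff replace `Timecop.freeze` with the Rails-provided `travel_to`, which comes from `ActiveSupport::Testing::TimeHelpers` and is included in the spec suite. A minimal usage sketch, outside of any particular spec:

require 'date'
require 'active_support/testing/time_helpers'

# Sketch: the built-in helper replaces the Timecop dependency.
include ActiveSupport::Testing::TimeHelpers

travel_to(Date.parse('2020-08-22')) do
  # Time.current, Date.today and friends are pinned inside this block,
  # and restored automatically when it returns.
end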
diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
index 334cac653cf..885eef5723e 100644
--- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do
let(:partitioning_key) { :created_at }
around do |example|
- Timecop.freeze(Date.parse('2020-08-22')) { example.run }
+ travel_to(Date.parse('2020-08-22')) { example.run }
end
context 'with existing partitions' do
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
index ec3d0a6dbcb..c43b51e10a0 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table_spec.rb
@@ -116,23 +116,6 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::BackfillPartition
expect(jobs_updated).to eq(1)
end
- context 'when the feature flag is disabled' do
- let(:mock_connection) { double('connection') }
-
- before do
- allow(subject).to receive(:connection).and_return(mock_connection)
- stub_feature_flags(backfill_partitioned_audit_events: false)
- end
-
- it 'exits without attempting to copy data' do
- expect(mock_connection).not_to receive(:execute)
-
- subject.perform(1, 100, source_table, destination_table, unique_key)
-
- expect(destination_model.count).to eq(0)
- end
- end
-
context 'when the job is run within an explicit transaction block' do
let(:mock_connection) { double('connection') }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 44ef0b307fe..147637cf471 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -213,7 +213,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
it 'creates partitions including the next month from today' do
today = Date.new(2020, 5, 8)
- Timecop.freeze(today) do
+ travel_to(today) do
migration.partition_table_by_date source_table, partition_column, min_date: min_date
expect_range_partitions_for(partitioned_table, {
@@ -233,7 +233,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
context 'without min_date, max_date' do
it 'creates partitions for the current and next month' do
current_date = Date.new(2020, 05, 22)
- Timecop.freeze(current_date.to_time) do
+ travel_to(current_date.to_time) do
migration.partition_table_by_date source_table, partition_column
expect_range_partitions_for(partitioned_table, {
@@ -514,6 +514,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
allow(migration).to receive(:table_exists?).with(partitioned_table).and_return(true)
allow(migration).to receive(:copy_missed_records)
allow(migration).to receive(:execute).with(/VACUUM/)
+ allow(migration).to receive(:execute).with(/^(RE)?SET/)
end
it 'finishes remaining jobs for the correct table' do
@@ -567,6 +568,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
allow(Gitlab::BackgroundMigration).to receive(:steal)
allow(migration).to receive(:execute).with(/VACUUM/)
+ allow(migration).to receive(:execute).with(/^(RE)?SET/)
end
it 'idempotently cleans up after failed background migrations' do
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
new file mode 100644
index 00000000000..1da67a5a6c0
--- /dev/null
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresIndex do
+ before do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ CREATE INDEX foo_idx ON public.users (name);
+ CREATE UNIQUE INDEX bar_key ON public.users (id);
+
+ CREATE TABLE example_table (id serial primary key);
+ SQL
+ end
+
+ def find(name)
+ described_class.by_identifier(name)
+ end
+
+ describe '.by_identifier' do
+ it 'finds the index' do
+ expect(find('public.foo_idx')).to be_a(Gitlab::Database::PostgresIndex)
+ end
+
+ it 'raises an error if not found' do
+ expect { find('public.idontexist') }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'raises ArgumentError if given a non-fully qualified index name' do
+ expect { find('foo') }.to raise_error(ArgumentError, /not fully qualified/)
+ end
+ end
+
+ describe '.regular' do
+ it 'only returns non-unique indexes' do
+ expect(described_class.regular).to all(have_attributes(unique: false))
+ end
+
+ it 'only returns non-partitioned indexes' do
+ expect(described_class.regular).to all(have_attributes(partitioned: false))
+ end
+
+ it 'only returns indexes that do not serve an exclusion constraint' do
+ expect(described_class.regular).to all(have_attributes(exclusion: false))
+ end
+ end
+
+ describe '.not_match' do
+ it 'excludes indexes matching the given regex' do
+ expect(described_class.not_match('^bar_k').map(&:name)).to all(match(/^(?!bar_k).*/))
+ end
+
+ it 'returns indexes that do not match the given prefix' do
+ expect(described_class.not_match('^bar_k')).not_to be_empty
+ end
+ end
+
+ describe '.random_few' do
+ it 'limits the number of returned records to the given count' do
+ expect(described_class.random_few(2).size).to eq(2)
+ end
+ end
+
+ describe '#unique?' do
+ it 'returns true for a unique index' do
+ expect(find('public.bar_key')).to be_unique
+ end
+
+ it 'returns false for a regular, non-unique index' do
+ expect(find('public.foo_idx')).not_to be_unique
+ end
+
+ it 'returns true for a primary key index' do
+ expect(find('public.example_table_pkey')).to be_unique
+ end
+ end
+
+ describe '#valid_index?' do
+ it 'returns true if the index is valid' do
+ expect(find('public.foo_idx')).to be_valid_index
+ end
+
+ it 'returns false if the index is marked as invalid' do
+ ActiveRecord::Base.connection.execute(<<~SQL)
+ UPDATE pg_index SET indisvalid=false
+ FROM pg_class
+ WHERE pg_class.relname = 'foo_idx' AND pg_index.indexrelid = pg_class.oid
+ SQL
+
+ expect(find('public.foo_idx')).not_to be_valid_index
+ end
+ end
+
+ describe '#to_s' do
+ it 'returns the index name' do
+ expect(find('public.foo_idx').to_s).to eq('foo_idx')
+ end
+ end
+
+ describe '#name' do
+ it 'returns the name' do
+ expect(find('public.foo_idx').name).to eq('foo_idx')
+ end
+ end
+
+ describe '#schema' do
+ it 'returns the index schema' do
+ expect(find('public.foo_idx').schema).to eq('public')
+ end
+ end
+
+ describe '#definition' do
+ it 'returns the index definition' do
+ expect(find('public.foo_idx').definition).to eq('CREATE INDEX foo_idx ON public.users USING btree (name)')
+ end
+ end
+end
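The `#valid_index?` examples above flip `pg_index.indisvalid` by hand to simulate an index left behind by a failed concurrent build. For reference, the same check can be expressed directly against the catalogs; this is a hedged sketch, not the model's actual scope:

# Sketch: report whether a named index was left INVALID by a failed
# CREATE INDEX CONCURRENTLY, using the PostgreSQL catalogs directly.
def index_valid?(connection, schema, name)
  connection.select_value(<<~SQL)
    SELECT i.indisvalid
    FROM pg_index i
    JOIN pg_class c ON c.oid = i.indexrelid
    JOIN pg_namespace n ON n.oid = c.relnamespace
    WHERE n.nspname = #{connection.quote(schema)}
      AND c.relname = #{connection.quote(name)}
  SQL
end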
diff --git a/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
new file mode 100644
index 00000000000..a80bf8176d2
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/concurrent_reindex_spec.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ConcurrentReindex, '#perform' do
+ subject { described_class.new(index, logger: logger) }
+
+ let(:table_name) { '_test_reindex_table' }
+ let(:column_name) { '_test_column' }
+ let(:index_name) { '_test_reindex_index' }
+ let(:index) { instance_double(Gitlab::Database::PostgresIndex, indexrelid: 42, name: index_name, schema: 'public', partitioned?: false, unique?: false, exclusion?: false, definition: 'CREATE INDEX _test_reindex_index ON public._test_reindex_table USING btree (_test_column)') }
+ let(:logger) { double('logger', debug: nil, info: nil, error: nil) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ #{column_name} integer NOT NULL);
+
+ CREATE INDEX #{index.name} ON #{table_name} (#{column_name});
+ SQL
+ end
+
+ context 'when the index is unique' do
+ before do
+ allow(index).to receive(:unique?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /UNIQUE indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index is partitioned' do
+ before do
+ allow(index).to receive(:partitioned?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /partitioned indexes are currently not supported/)
+ end
+ end
+
+ context 'when the index serves an exclusion constraint' do
+ before do
+ allow(index).to receive(:exclusion?).and_return(true)
+ end
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /indexes serving an exclusion constraint are currently not supported/)
+ end
+ end
+
+ context 'when the index is a lingering temporary index from a previous reindexing run' do
+ context 'with the temporary index prefix' do
+ let(:index_name) { 'tmp_reindex_something' }
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+
+ context 'with the replaced index prefix' do
+ let(:index_name) { 'old_reindex_something' }
+
+ it 'raises an error' do
+ expect do
+ subject.perform
+ end.to raise_error(described_class::ReindexError, /left-over temporary index/)
+ end
+ end
+ end
+
+ context 'replacing the original index with a rebuilt copy' do
+ let(:replacement_name) { 'tmp_reindex_42' }
+ let(:replaced_name) { 'old_reindex_42' }
+
+ let(:create_index) { "CREATE INDEX CONCURRENTLY #{replacement_name} ON public.#{table_name} USING btree (#{column_name})" }
+ let(:drop_index) do
+ <<~SQL
+ DROP INDEX CONCURRENTLY
+ IF EXISTS "public"."#{replacement_name}"
+ SQL
+ end
+
+ let!(:original_index) { find_index_create_statement }
+
+ it 'integration test: executing full index replacement without mocks' do
+ allow(connection).to receive(:execute).and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+
+ subject.perform
+
+ check_index_exists
+ end
+
+ context 'mocked specs' do
+ before do
+ allow(subject).to receive(:connection).and_return(connection)
+ allow(subject).to receive(:disable_statement_timeout).and_yield
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(subject).to receive(:disable_statement_timeout).twice.and_yield
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name)
+ expect_index_rename(replacement_name, index.name)
+ expect_index_rename(replaced_name, replacement_name)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.perform
+
+ check_index_exists
+ end
+
+ context 'when a dangling index is left from a previous run' do
+ before do
+ connection.execute("CREATE INDEX #{replacement_name} ON #{table_name} (#{column_name})")
+ end
+
+ it 'replaces the existing index with an identical index' do
+ expect(subject).to receive(:disable_statement_timeout).exactly(3).times.and_yield
+
+ expect_to_execute_concurrently_in_order(drop_index)
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name)
+ expect_index_rename(replacement_name, index.name)
+ expect_index_rename(replaced_name, replacement_name)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ subject.perform
+
+ check_index_exists
+ end
+ end
+
+ context 'when it fails to create the replacement index' do
+ it 'safely cleans up and signals the error' do
+ expect(connection).to receive(:execute).with(create_index).ordered
+ .and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when the replacement index is not valid' do
+ it 'safely cleans up and signals the error' do
+ replacement_index = double('replacement index', valid_index?: false)
+ allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(described_class::ReindexError, /replacement index was created as INVALID/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when a database error occurs while swapping the indexes' do
+ it 'safely cleans up and signals the error' do
+ replacement_index = double('replacement index', valid_index?: true)
+ allow(Gitlab::Database::PostgresIndex).to receive(:find_by).with(schema: 'public', name: replacement_name).and_return(nil, replacement_index)
+
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true).and_yield
+ end
+
+ expect_index_rename(index.name, replaced_name).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(ActiveRecord::ConnectionTimeoutError, /connect timeout/)
+
+ check_index_exists
+ end
+ end
+
+ context 'when with_lock_retries fails to acquire the lock' do
+ it 'safely cleans up and signals the error' do
+ expect_to_execute_concurrently_in_order(create_index)
+
+ expect_next_instance_of(::Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).with(raise_on_exhaustion: true)
+ .and_raise(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, 'exhausted')
+ end
+
+ expect_to_execute_concurrently_in_order(drop_index)
+
+ expect { subject.perform }.to raise_error(::Gitlab::Database::WithLockRetries::AttemptsExhaustedError, /exhausted/)
+
+ check_index_exists
+ end
+ end
+ end
+ end
+
+ def expect_to_execute_concurrently_in_order(sql)
+ # Indexes cannot be created CONCURRENTLY in a transaction. Since the tests are wrapped in transactions,
+ # verify the original call but pass through the non-concurrent form.
+ expect(connection).to receive(:execute).with(sql).ordered.and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
+
+ def expect_index_rename(from, to)
+ expect(connection).to receive(:execute).with(<<~SQL).ordered
+ ALTER INDEX "public"."#{from}"
+ RENAME TO "#{to}"
+ SQL
+ end
+
+ def find_index_create_statement
+ ActiveRecord::Base.connection.select_value(<<~SQL)
+ SELECT indexdef
+ FROM pg_indexes
+ WHERE schemaname = 'public'
+ AND indexname = #{ActiveRecord::Base.connection.quote(index.name)}
+ SQL
+ end
+
+ def check_index_exists
+ expect(find_index_create_statement).to eq(original_index)
+ end
+end
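The mocked examples above describe the swap phase of a concurrent reindex: build a `tmp_reindex_*` copy, rename the three indexes inside a lock-retries block, then drop the leftover copy. A hedged sketch of that rename sequence as plain SQL issued from Ruby; the index names are illustrative and the lock-retries wrapper is omitted:

# Sketch: three renames swap the freshly built index into place, then the
# leftover copy is dropped. In the real class this runs inside a
# WithLockRetries block; constructor arguments are omitted here.
connection = ActiveRecord::Base.connection

[
  %(ALTER INDEX "public"."my_index" RENAME TO "old_reindex_42"),
  %(ALTER INDEX "public"."tmp_reindex_42" RENAME TO "my_index"),
  %(ALTER INDEX "public"."old_reindex_42" RENAME TO "tmp_reindex_42")
].each { |sql| connection.execute(sql) }

connection.execute(%(DROP INDEX CONCURRENTLY IF EXISTS "public"."tmp_reindex_42"))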
diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
new file mode 100644
index 00000000000..efb5b8463a1
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing::ReindexAction, '.keep_track_of' do
+ let(:index) { double('index', identifier: 'public.something', ondisk_size_bytes: 10240, reload: nil) }
+ let(:size_after) { 512 }
+
+ it 'yields to the caller' do
+ expect { |b| described_class.keep_track_of(index, &b) }.to yield_control
+ end
+
+ def find_record
+ described_class.find_by(index_identifier: index.identifier)
+ end
+
+ it 'creates the record with a start time and updates its end time' do
+ freeze_time do
+ described_class.keep_track_of(index) do
+ expect(find_record.action_start).to be_within(1.second).of(Time.zone.now)
+
+ travel(10.seconds)
+ end
+
+ duration = find_record.action_end - find_record.action_start
+
+ expect(duration).to be_within(1.second).of(10.seconds)
+ end
+ end
+
+ it 'creates the record with its status set to :started and updates its state to :finished' do
+ described_class.keep_track_of(index) do
+ expect(find_record).to be_started
+ end
+
+ expect(find_record).to be_finished
+ end
+
+ it 'creates the record with the index start size and updates its end size' do
+ described_class.keep_track_of(index) do
+ expect(find_record.ondisk_size_bytes_start).to eq(index.ondisk_size_bytes)
+
+ expect(index).to receive(:reload).once
+ allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+ end
+
+ expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ end
+
+ context 'in case of errors' do
+ it 'sets the state to failed' do
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record).to be_failed
+ end
+
+ it 'records the end time' do
+ freeze_time do
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record.action_end).to be_within(1.second).of(Time.zone.now)
+ end
+ end
+
+ it 'records the resulting index size' do
+ expect(index).to receive(:reload).once
+ allow(index).to receive(:ondisk_size_bytes).and_return(size_after)
+
+ expect do
+ described_class.keep_track_of(index) do
+ raise 'something went wrong'
+ end
+ end.to raise_error(/something went wrong/)
+
+ expect(find_record.ondisk_size_bytes_end).to eq(size_after)
+ end
+ end
+end
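The examples above document the contract of `.keep_track_of`: create a record in the `started` state with the index's current size, yield to the caller, and then record the end time, final size and terminal state even when the block raises. A hedged sketch of that shape follows; the `state` enum and the exact persistence calls are assumptions drawn only from what the spec asserts:

# Sketch of the tracking wrapper the examples above exercise. Column names
# (index_identifier, action_start, action_end, ondisk_size_bytes_*) come from
# the spec; `state` is assumed to be an enum with started/finished/failed.
def self.keep_track_of(index)
  action = create!(
    index_identifier: index.identifier,
    action_start: Time.zone.now,
    ondisk_size_bytes_start: index.ondisk_size_bytes,
    state: :started
  )

  yield

  action.state = :finished
rescue StandardError
  action.state = :failed
  raise
ensure
  if action
    index.reload
    action.action_end = Time.zone.now
    action.ondisk_size_bytes_end = index.ondisk_size_bytes
    action.save!
  end
end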
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
new file mode 100644
index 00000000000..26954a9a32f
--- /dev/null
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::Reindexing do
+ describe '.perform' do
+ before do
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).and_yield
+ end
+
+ shared_examples_for 'reindexing' do
+ before do
+ indexes.zip(reindexers).each do |index, reindexer|
+ allow(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).and_return(reindexer)
+ allow(reindexer).to receive(:perform)
+ end
+ end
+
+ it 'performs concurrent reindexing for each index' do
+ indexes.zip(reindexers).each do |index, reindexer|
+ expect(Gitlab::Database::Reindexing::ConcurrentReindex).to receive(:new).with(index).ordered.and_return(reindexer)
+ expect(reindexer).to receive(:perform)
+ end
+
+ subject
+ end
+
+ it 'keeps track of actions and creates ReindexAction records' do
+ indexes.each do |index|
+ expect(Gitlab::Database::Reindexing::ReindexAction).to receive(:keep_track_of).with(index).and_yield
+ end
+
+ subject
+ end
+ end
+
+ context 'with multiple indexes' do
+ subject { described_class.perform(indexes) }
+
+ let(:indexes) { [instance_double('Gitlab::Database::PostgresIndex'), instance_double('Gitlab::Database::PostgresIndex')] }
+ let(:reindexers) { [instance_double('Gitlab::Database::Reindexing::ConcurrentReindex'), instance_double('Gitlab::Database::Reindexing::ConcurrentReindex')] }
+
+ it_behaves_like 'reindexing'
+ end
+
+ context 'with a single index' do
+ subject { described_class.perform(indexes.first) }
+
+ let(:indexes) { [instance_double('Gitlab::Database::PostgresIndex')] }
+ let(:reindexers) { [instance_double('Gitlab::Database::Reindexing::ConcurrentReindex')] }
+
+ it_behaves_like 'reindexing'
+ end
+ end
+
+ describe '.candidate_indexes' do
+ subject { described_class.candidate_indexes }
+
+ it 'retrieves regular indexes that are not left-overs from previous runs' do
+ result = double
+ expect(Gitlab::Database::PostgresIndex).to receive_message_chain('regular.not_match.not_match').with(no_args).with('^tmp_reindex_').with('^old_reindex_').and_return(result)
+
+ expect(subject).to eq(result)
+ end
+ end
+end
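The `.candidate_indexes` example above stubs the scope chain with `receive_message_chain`; written out, the chain the expectation names looks like the following sketch, based only on what the spec asserts:

# Sketch: regular (non-unique, non-partitioned, non-exclusion) indexes,
# excluding temporary and replaced indexes left over from earlier runs.
Gitlab::Database::PostgresIndex
  .regular
  .not_match('^tmp_reindex_')
  .not_match('^old_reindex_')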
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
index e36a4f610e1..cf75e5a72d9 100644
--- a/spec/lib/gitlab/database/similarity_score_spec.rb
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -90,4 +90,15 @@ RSpec.describe Gitlab::Database::SimilarityScore do
expect(subject).to eq(%w[different same gitlab-danger])
end
end
+
+ describe 'annotation' do
+ it 'annotates the generated SQL expression' do
+ expression = Gitlab::Database::SimilarityScore.build_expression(search: 'test', rules: [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ])
+
+ expect(Gitlab::Database::SimilarityScore).to be_order_by_similarity(expression)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 2cc6e175500..220ae705e71 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -104,9 +104,69 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
context 'after 3 iterations' do
- let(:retry_count) { 4 }
+ it_behaves_like 'retriable exclusive lock on `projects`' do
+ let(:retry_count) { 4 }
+ end
+
+ context 'setting the idle transaction timeout' do
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the idle transaction timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+ allow(subject).to receive(:run_block_with_transaction).once
+
+ expect(subject).not_to receive(:disable_idle_in_transaction_timeout)
+
+ subject.run {}
+ end
+ end
- it_behaves_like 'retriable exclusive lock on `projects`'
+ context 'when there is an outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the idle transaction timeout so the code can sleep and retry' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+
+ n = 0
+ allow(subject).to receive(:run_block_with_transaction).twice do
+ n += 1
+ raise(ActiveRecord::LockWaitTimeout) if n == 1
+ end
+
+ expect(subject).to receive(:disable_idle_in_transaction_timeout).once
+
+ subject.run {}
+ end
+ end
+ end
+ end
+
+ context 'after the retries are exhausted' do
+ let(:timing_configuration) do
+ [
+ [1.second, 1.second]
+ ]
+ end
+
+ context 'when there is no outer transaction: disable_ddl_transaction! is set in the migration' do
+ it 'does not disable the lock_timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).not_to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
+
+ context 'when there is an outer transaction: disable_ddl_transaction! is not set in the migration' do
+ it 'disables the lock_timeout' do
+ allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(true)
+ allow(subject).to receive(:run_block_with_transaction).once.and_raise(ActiveRecord::LockWaitTimeout)
+
+ expect(subject).to receive(:disable_lock_timeout)
+
+ subject.run {}
+ end
+ end
end
context 'after the retries, without setting lock_timeout' do
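The new contexts above distinguish migrations that use `disable_ddl_transaction!` (no outer transaction) from those that do not; only in the latter case are the session timeouts relaxed so the retry loop can sleep between attempts. A hedged sketch of that decision; the setting names are real PostgreSQL settings, while the surrounding method is illustrative:

# Sketch: only relax session timeouts when an outer transaction is open,
# since sleeping between retries would otherwise trip these limits.
def relax_timeouts_if_needed(connection)
  return unless connection.transaction_open?

  connection.execute("SET LOCAL idle_in_transaction_session_timeout TO '0'")
  connection.execute("SET LOCAL lock_timeout TO '0'")
end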
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 420aa0a8df6..dd3daf1b2c5 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -70,25 +70,6 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.postgresql_9_or_less?' do
- it 'returns true when using postgresql 8.4' do
- allow(described_class).to receive(:version).and_return('8.4')
- expect(described_class.postgresql_9_or_less?).to eq(true)
- end
-
- it 'returns true when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.postgresql_9_or_less?).to eq(true)
- end
-
- it 'returns false when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.postgresql_9_or_less?).to eq(false)
- end
- end
-
describe '.postgresql_minimum_supported_version?' do
it 'returns false when using PostgreSQL 10' do
allow(described_class).to receive(:version).and_return('10')
@@ -150,68 +131,6 @@ RSpec.describe Gitlab::Database do
end
end
- describe '.pg_wal_lsn_diff' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_wal_lsn_diff).to eq('pg_xlog_location_diff')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_wal_lsn_diff).to eq('pg_wal_lsn_diff')
- end
- end
-
- describe '.pg_current_wal_insert_lsn' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_xlog_insert_location')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_current_wal_insert_lsn).to eq('pg_current_wal_insert_lsn')
- end
- end
-
- describe '.pg_last_wal_receive_lsn' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_xlog_receive_location')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_last_wal_receive_lsn).to eq('pg_last_wal_receive_lsn')
- end
- end
-
- describe '.pg_last_wal_replay_lsn' do
- it 'returns old name when using PostgreSQL 9.6' do
- allow(described_class).to receive(:version).and_return('9.6')
-
- expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_xlog_replay_location')
- end
-
- it 'returns new name when using PostgreSQL 10 or newer' do
- allow(described_class).to receive(:version).and_return('10')
-
- expect(described_class.pg_last_wal_replay_lsn).to eq('pg_last_wal_replay_lsn')
- end
- end
-
- describe '.pg_last_xact_replay_timestamp' do
- it 'returns pg_last_xact_replay_timestamp' do
- expect(described_class.pg_last_xact_replay_timestamp).to eq('pg_last_xact_replay_timestamp')
- end
- end
-
describe '.nulls_last_order' do
it { expect(described_class.nulls_last_order('column', 'ASC')).to eq 'column ASC NULLS LAST'}
it { expect(described_class.nulls_last_order('column', 'DESC')).to eq 'column DESC NULLS LAST'}
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
index bd60c24859c..72a66b0451e 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
@@ -120,7 +120,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch do
described_class.new(merge_request.merge_request_diff,
batch_page,
batch_size,
- collection_default_args)
+ **collection_default_args)
end
end
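The `**collection_default_args` change above follows the Ruby 2.7+ keyword-argument separation: a hash intended as keyword arguments must now be splatted explicitly. A minimal illustration with generic names:

# Sketch: without the double splat, Ruby 2.7 warns (and Ruby 3 raises)
# because the hash is passed as a single positional argument.
def build_collection(diff, page, size, diff_options:, diff_refs: nil)
  { diff: diff, page: page, size: size, options: diff_options, refs: diff_refs }
end

default_args = { diff_options: { expanded: true }, diff_refs: nil }

build_collection('diff', 1, 30, **default_args) # keyword arguments, as intended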
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 7e926f86096..f6810d7a966 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -43,7 +43,8 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
describe '#decorate' do
# Manually creates a Diff::File object to avoid triggering the cache on
- # the FileCollection::MergeRequestDiff
+ # the FileCollection::MergeRequestDiff
+ #
let(:diff_file) do
diffs = merge_request.diffs
raw_diff = diffs.diffable.raw_diffs(diffs.diff_options.merge(paths: ['CHANGELOG'])).first
@@ -73,6 +74,37 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
expect(rich_texts).to all(be_html_safe)
end
+
+ context "when diff_file is uncached due to default_max_patch_bytes change" do
+ before do
+ expect(cache).to receive(:read_file).at_least(:once).and_return([])
+
+ # Stub out the application's default and current patch size limits. We
+ # want them to be different, and the diff file to be sized between
+ # the 2 values.
+ #
+ diff_file_size_bytes = (diff_file.diff.diff.bytesize * 10)
+
+ stub_const("#{diff_file.diff.class}::DEFAULT_MAX_PATCH_BYTES", diff_file_size_bytes - 1)
+ expect(diff_file.diff.class).to receive(:patch_safe_limit_bytes).and_return(diff_file_size_bytes + 1)
+ expect(diff_file.diff.class)
+ .to receive(:patch_safe_limit_bytes)
+ .with(diff_file.diff.class::DEFAULT_MAX_PATCH_BYTES)
+ .and_call_original
+ end
+
+ it "manually writes highlighted lines to the cache" do
+ expect(cache).to receive(:write_to_redis_hash).and_call_original
+
+ cache.decorate(diff_file)
+ end
+
+ it "assigns highlighted diff lines to the DiffFile" do
+ expect(diff_file.highlighted_diff_lines.size).to be > 5
+
+ cache.decorate(diff_file)
+ end
+ end
end
shared_examples 'caches missing entries' do
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 07b8070be30..ef448ee96a4 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -65,24 +65,15 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
end
- [true, false].each do |state_tracking_enabled|
- context "and current user can update noteable #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
- project.add_developer(user)
- end
+ context "and current user can update noteable" do
+ before do
+ project.add_developer(user)
+ end
- it 'does not raise an error' do
- if state_tracking_enabled
- expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
- else
- # One system note is created for the 'close' event
- expect { receiver.execute }.to change { noteable.notes.count }.by(1)
- end
+ it 'does not raise an error' do
+ expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
- expect(noteable.reload).to be_closed
- end
+ expect(noteable.reload).to be_closed
end
end
end
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index 01e2fe8ce17..40669f06371 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -25,13 +25,17 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
let!(:lease) { stub_exclusive_lease(unique_key, 'uuid') }
it 'calls the given block' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
end
it 'calls the given block continuously' do
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(false)
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(false, an_instance_of(described_class::SleepingLock))
end
it 'cancels the exclusive lease after the block' do
@@ -74,7 +78,8 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
expect(lease).to receive(:try_obtain).exactly(3).times { nil }
expect(lease).to receive(:try_obtain).once { unique_key }
- expect { |b| class_instance.in_lock(unique_key, &b) }.to yield_with_args(true)
+ expect { |b| class_instance.in_lock(unique_key, &b) }
+ .to yield_with_args(true, an_instance_of(described_class::SleepingLock))
end
end
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 9bc865f4d29..aecbc8734db 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -69,12 +69,26 @@ RSpec.describe Gitlab::Experimentation do
end
end
+ describe '#push_frontend_experiment' do
+ it 'pushes an experiment to the frontend' do
+ gon = instance_double('gon')
+ experiments = { experiments: { 'myExperiment' => true } }
+
+ stub_experiment_for_user(my_experiment: true)
+ allow(controller).to receive(:gon).and_return(gon)
+
+ expect(gon).to receive(:push).with(experiments, true)
+
+ controller.push_frontend_experiment(:my_experiment)
+ end
+ end
+
describe '#experiment_enabled?' do
subject { controller.experiment_enabled?(:test_experiment) }
context 'cookie is not present' do
- it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of nil' do
- expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, nil)
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of nil' do
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, nil)
controller.experiment_enabled?(:test_experiment)
end
end
@@ -85,22 +99,22 @@ RSpec.describe Gitlab::Experimentation do
get :index
end
- it 'calls Gitlab::Experimentation.enabled_for_user? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
+ it 'calls Gitlab::Experimentation.enabled_for_value? with the name of the experiment and an experimentation_subject_index of the modulo 100 of the hex value of the uuid' do
# 'abcd1234'.hex % 100 = 76
- expect(Gitlab::Experimentation).to receive(:enabled_for_user?).with(:test_experiment, 76)
+ expect(Gitlab::Experimentation).to receive(:enabled_for_value?).with(:test_experiment, 76)
controller.experiment_enabled?(:test_experiment)
end
end
it 'returns true when DNT: 0 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?) { true }
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
controller.request.headers['DNT'] = '0'
is_expected.to be_truthy
end
it 'returns false when DNT: 1 is set in the request' do
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?) { true }
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?) { true }
controller.request.headers['DNT'] = '1'
is_expected.to be_falsy
@@ -336,8 +350,8 @@ RSpec.describe Gitlab::Experimentation do
end
end
- describe '.enabled_for_user?' do
- subject { described_class.enabled_for_user?(:test_experiment, experimentation_subject_index) }
+ describe '.enabled_for_value?' do
+ subject { described_class.enabled_for_value?(:test_experiment, experimentation_subject_index) }
let(:experimentation_subject_index) { 9 }
@@ -377,4 +391,32 @@ RSpec.describe Gitlab::Experimentation do
end
end
end
+
+ describe '.enabled_for_attribute?' do
+ subject { described_class.enabled_for_attribute?(:test_experiment, attribute) }
+
+ let(:attribute) { 'abcd' } # Digest::SHA1.hexdigest('abcd').hex % 100 = 7
+
+ context 'experiment is disabled' do
+ before do
+ allow(described_class).to receive(:enabled?).and_return(false)
+ end
+
+ it { is_expected.to be false }
+ end
+
+ context 'experiment is enabled' do
+ before do
+ allow(described_class).to receive(:enabled?).and_return(true)
+ end
+
+ it { is_expected.to be true }
+
+ context 'outside enabled ratio' do
+ let(:attribute) { 'abc' } # Digest::SHA1.hexdigest('abc').hex % 100 = 17
+
+ it { is_expected.to be false }
+ end
+ end
+ end
end
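The `.enabled_for_attribute?` examples rely on hashing the attribute into one of 100 buckets, as the inline comments note (`Digest::SHA1.hexdigest('abcd').hex % 100 = 7`). A hedged sketch of that bucketing; the 10% ratio below is an illustrative assumption, chosen only so the two values from the spec fall on opposite sides of it:

require 'digest'

# Sketch: map an arbitrary attribute to a stable bucket in 0..99 and compare
# it against the experiment's enabled ratio (assumed to be 10 here).
ENABLED_RATIO = 10

def experiment_bucket(attribute)
  Digest::SHA1.hexdigest(attribute).hex % 100
end

experiment_bucket('abcd')                 # => 7, inside the assumed ratio
experiment_bucket('abc')                  # => 17, outside the assumed ratio
experiment_bucket('abcd') < ENABLED_RATIO # => true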
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index e1bcf4aeeb1..9271f635b14 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -85,9 +85,9 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
}
end
- let(:stale_sha) { Timecop.freeze(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
- let(:active_sha) { Timecop.freeze(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
- let(:future_sha) { Timecop.freeze(100.days.since) { create_commit } }
+ let(:stale_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
+ let(:active_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
+ let(:future_sha) { travel_to(100.days.since) { create_commit } }
before do
repository.create_branch('stale-1', stale_sha)
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index b202015464f..8198c2651a7 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -531,7 +531,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
let(:iterator) { [fake_diff(1, 1)] * 4 }
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: 2, max_lines: max_lines })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: 2, max_lines: max_lines })
end
it 'prunes diffs by default even little ones' do
@@ -556,7 +558,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: max_files, max_lines: 80 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: max_files, max_lines: 80 })
end
it 'prunes diffs by default even little ones' do
@@ -581,7 +585,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: max_files, max_lines: 80 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: max_files, max_lines: 80 })
end
it 'prunes diffs by default even little ones' do
@@ -665,8 +671,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
before do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS',
- { max_files: max_files, max_lines: 80 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: max_files, max_lines: 80 })
end
it 'considers size of diffs before the offset for prunning' do
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 117c519e98d..980a52bb61e 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -284,13 +284,21 @@ EOT
end
describe '#line_count' do
- it 'returns the correct number of lines' do
- diff = described_class.new(gitaly_diff)
+ let(:diff) { described_class.new(gitaly_diff) }
+ it 'returns the correct number of lines' do
expect(diff.line_count).to eq(7)
end
end
+ describe "#diff_bytesize" do
+ let(:diff) { described_class.new(gitaly_diff) }
+
+ it "returns the size of the diff in bytes" do
+ expect(diff.diff_bytesize).to eq(diff.diff.bytesize)
+ end
+ end
+
describe '#too_large?' do
it 'returns true for a diff that is too large' do
diff = described_class.new(diff: 'a' * 204800)
diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb
index c8fbc674c73..e1873c6ddb5 100644
--- a/spec/lib/gitlab/git/object_pool_spec.rb
+++ b/spec/lib/gitlab/git/object_pool_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Git::ObjectPool do
describe '#create' do
before do
- subject.create
+ subject.create # rubocop:disable Rails/SaveBang
end
context "when the pool doesn't exist yet" do
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Git::ObjectPool do
context 'when the pool already exists' do
it 'raises an FailedPrecondition' do
expect do
- subject.create
+ subject.create # rubocop:disable Rails/SaveBang
end.to raise_error(GRPC::FailedPrecondition)
end
end
diff --git a/spec/lib/gitlab/git/remote_mirror_spec.rb b/spec/lib/gitlab/git/remote_mirror_spec.rb
index 423c4aa9620..92504b7aafe 100644
--- a/spec/lib/gitlab/git/remote_mirror_spec.rb
+++ b/spec/lib/gitlab/git/remote_mirror_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Git::RemoteMirror do
.to receive(:update_remote_mirror)
.with(ref_name, ['master'], ssh_key: 'KEY', known_hosts: 'KNOWN HOSTS', keep_divergent_refs: true)
- remote_mirror.update
+ remote_mirror.update # rubocop:disable Rails/SaveBang
end
it 'wraps gitaly errors' do
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Git::RemoteMirror do
.to receive(:update_remote_mirror)
.and_raise(StandardError)
- expect { remote_mirror.update }.to raise_error(StandardError)
+ expect { remote_mirror.update }.to raise_error(StandardError) # rubocop:disable Rails/SaveBang
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index 73eecd3401a..3dc0db1bc3c 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -2079,7 +2079,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:object_pool_rugged) { Rugged::Repository.new(object_pool_path) }
before do
- object_pool.create
+ object_pool.create # rubocop:disable Rails/SaveBang
end
it 'does not raise an error when disconnecting a non-linked repository' do
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index 4f6a3fb823e..16cea1dc1a3 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -7,7 +7,7 @@ require 'tempfile'
RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
- let(:feature_flag_name) { 'feature-flag-name' }
+ let(:feature_flag_name) { wrapper.rugged_feature_keys.first }
let(:temp_gitaly_metadata_file) { create_temporary_gitaly_metadata_file }
before(:all) do
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
end
- context 'when feature flag is not persisted' do
+ context 'when feature flag is not persisted', stub_feature_flags: false do
context 'when running puma with multiple threads' do
before do
allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(true)
diff --git a/spec/lib/gitlab/git_access_snippet_spec.rb b/spec/lib/gitlab/git_access_snippet_spec.rb
index 3b8b5fd82c6..362ea3c006e 100644
--- a/spec/lib/gitlab/git_access_snippet_spec.rb
+++ b/spec/lib/gitlab/git_access_snippet_spec.rb
@@ -232,29 +232,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
- context 'when geo is enabled', if: Gitlab.ee? do
- let(:user) { snippet.author }
- let!(:primary_node) { FactoryBot.create(:geo_node, :primary) }
-
- before do
- allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
- allow(::Gitlab::Geo).to receive(:secondary_with_primary?).and_return(true)
- end
-
- # Without override, push access would return Gitlab::GitAccessResult::CustomAction
- it 'skips geo for snippet' do
- expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
- end
-
- context 'when user is migration bot' do
- let(:user) { migration_bot }
-
- it 'skips geo for snippet' do
- expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
- end
- end
- end
-
context 'when changes are specific' do
let(:changes) { "2d1db523e11e777e49377cfb22d368deec3f0793 ddd0f15ae83993f5cb66a927a28673882e99100b master" }
let(:user) { snippet.author }
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 9581b017839..f977fe1638f 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -13,6 +13,10 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
let(:client) { described_class.new(repository) }
describe '#diff_from_parent' do
+ before do
+ stub_feature_flags(increased_diff_limits: false)
+ end
+
context 'when a commit has a parent' do
it 'sends an RPC request with the parent ID as left commit' do
request = Gitaly::CommitDiffRequest.new(
diff --git a/spec/lib/gitlab/gon_helper_spec.rb b/spec/lib/gitlab/gon_helper_spec.rb
index 95db6b2b4e0..3d3f381b6d2 100644
--- a/spec/lib/gitlab/gon_helper_spec.rb
+++ b/spec/lib/gitlab/gon_helper_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe Gitlab::GonHelper do
end
describe '#push_frontend_feature_flag' do
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
it 'pushes a feature flag to the frontend' do
gon = instance_double('gon')
thing = stub_feature_flag_gate('thing')
diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
index e68c1446502..9538c4bae2b 100644
--- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do
end
it 'returns the correct duration in seconds' do
- Timecop.freeze(start_time) do
+ travel_to(start_time) do
subject.before
expect(subject.parameters(mock_request, nil)).to eq( { 'queue_duration_s': 1.hour.to_f })
diff --git a/spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb b/spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb
deleted file mode 100644
index af604e1c7d5..00000000000
--- a/spec/lib/gitlab/graphql/markdown_field/resolver_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::MarkdownField::Resolver do
- include Gitlab::Routing
- let(:resolver) { described_class.new(:note) }
-
- describe '#proc' do
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
- let(:note) do
- create(:note,
- note: "Referencing #{issue.to_reference(full: true)}")
- end
-
- it 'renders markdown correctly' do
- expect(resolver.proc.call(note, {}, {})).to include(issue_path(issue))
- end
-
- context 'when the issue is not publicly accessible' do
- let(:project) { create(:project, :private) }
-
- it 'hides the references from users that are not allowed to see the reference' do
- expect(resolver.proc.call(note, {}, {})).not_to include(issue_path(issue))
- end
-
- it 'shows the reference to users that are allowed to see it' do
- expect(resolver.proc.call(note, {}, { current_user: project.owner }))
- .to include(issue_path(issue))
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index e3da925376e..82090f992eb 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Graphql::MarkdownField do
+ include Gitlab::Routing
+
describe '.markdown_field' do
it 'creates the field with some default attributes' do
field = class_with_markdown_field(:test_html, null: true, method: :hello).fields['testHtml']
@@ -13,7 +15,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
context 'developer warnings' do
- let(:expected_error) { /Only `method` is allowed to specify the markdown field/ }
+ let_it_be(:expected_error) { /Only `method` is allowed to specify the markdown field/ }
it 'raises when passing a resolver' do
expect { class_with_markdown_field(:test_html, null: true, resolver: 'not really') }
@@ -27,30 +29,61 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
end
context 'resolving markdown' do
- let(:note) { build(:note, note: '# Markdown!') }
- let(:thing_with_markdown) { double('markdown thing', object: note) }
- let(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
- let(:query_type) { GraphQL::ObjectType.new }
- let(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
- let(:context) { GraphQL::Query::Context.new(query: OpenStruct.new(schema: schema), values: nil, object: nil) }
+ let_it_be(:note) { build(:note, note: '# Markdown!') }
+ let_it_be(:expected_markdown) { '<h1 data-sourcepos="1:1-1:11" dir="auto">Markdown!</h1>' }
+ let_it_be(:query_type) { GraphQL::ObjectType.new }
+ let_it_be(:schema) { GraphQL::Schema.define(query: query_type, mutation: nil)}
+ let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
+ let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: {}, object: nil) }
+
+ let(:type_class) { class_with_markdown_field(:note_html, null: false) }
+ let(:type_instance) { type_class.authorized_new(note, context) }
+ let(:field) { type_class.fields['noteHtml'] }
it 'renders markdown from the same property as the field name without the `_html` suffix' do
- field = class_with_markdown_field(:note_html, null: false).fields['noteHtml']
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ end
+
+ context 'when a `method` argument is passed' do
+ let(:type_class) { class_with_markdown_field(:test_html, null: false, method: :note) }
+ let(:field) { type_class.fields['testHtml'] }
- expect(field.to_graphql.resolve(thing_with_markdown, {}, context)).to eq(expected_markdown)
+ it 'renders markdown from a specific property' do
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to eq(expected_markdown)
+ end
end
- it 'renders markdown from a specific property when a `method` argument is passed' do
- field = class_with_markdown_field(:test_html, null: false, method: :note).fields['testHtml']
+ describe 'basic verification that references work' do
+ let_it_be(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project) }
+ let(:note) { build(:note, note: "Referencing #{issue.to_reference(full: true)}") }
+
+ it 'renders markdown correctly' do
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ end
+
+ context 'when the issue is not publicly accessible' do
+ let_it_be(:project) { create(:project, :private) }
+
+ it 'hides the references from users that are not allowed to see the reference' do
+ expect(field.to_graphql.resolve(type_instance, {}, context)).not_to include(issue_path(issue))
+ end
+
+ it 'shows the reference to users that are allowed to see it' do
+ context = GraphQL::Query::Context.new(query: query, values: { current_user: project.owner }, object: nil)
+ type_instance = type_class.authorized_new(note, context)
- expect(field.to_graphql.resolve(thing_with_markdown, {}, context)).to eq(expected_markdown)
+ expect(field.to_graphql.resolve(type_instance, {}, context)).to include(issue_path(issue))
+ end
+ end
end
end
end
def class_with_markdown_field(name, **args)
- Class.new(GraphQL::Schema::Object) do
+ Class.new(Types::BaseObject) do
prepend Gitlab::Graphql::MarkdownField
+ graphql_name 'MarkdownFieldTest'
markdown_field name, **args
end
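
The hunk above converts stable fixtures from `let` to `let_it_be` and resolves fields against a real authorized type instance. As a minimal sketch of the `let`/`let_it_be` distinction (assuming TestProf's `let_it_be`, which these specs rely on; the factory and example names below are illustrative only):

RSpec.describe 'fixture reuse sketch' do
  # Recreated for every example: one factory call per `it` block.
  let(:per_example_note) { build(:note) }

  # Built once before the whole example group and reused, which is why the
  # diff prefers it for fixtures that no example mutates.
  let_it_be(:shared_note) { build(:note) }

  it 'builds both fixtures' do
    expect(per_example_note).to be_new_record
    expect(shared_note).to be_new_record
  end
end
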
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
index 444c10074a0..77a8588e2cb 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
@@ -63,6 +63,17 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do
expect(order_list.first.sort_direction).to eq :desc
end
end
+
+ context 'when ordering by CASE', :aggregate_failures do
+ let(:relation) { Project.order(Arel::Nodes::Case.new(Project.arel_table[:pending_delete]).when(true).then(100).else(1000).asc) }
+
+ it 'assigns the right attribute name, named function, and direction' do
+ expect(order_list.count).to eq 1
+ expect(order_list.first.attribute_name).to eq 'pending_delete'
+ expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::Case)
+ expect(order_list.first.sort_direction).to eq :asc
+ end
+ end
end
describe '#validate_ordering' do
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
index c7e7db4d535..fa631aa5666 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
@@ -136,11 +136,12 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do
let(:relation) { Project.sorted_by_similarity_desc('test', include_in_select: true) }
let(:arel_table) { Project.arel_table }
let(:decoded_cursor) { { 'similarity' => 0.5, 'id' => 100 } }
+ let(:similarity_function_call) { Gitlab::Database::SimilarityScore::SIMILARITY_FUNCTION_CALL_WITH_ANNOTATION }
let(:similarity_sql) do
[
- '(SIMILARITY(COALESCE("projects"."path", \'\'), \'test\') * CAST(\'1\' AS numeric))',
- '(SIMILARITY(COALESCE("projects"."name", \'\'), \'test\') * CAST(\'0.7\' AS numeric))',
- '(SIMILARITY(COALESCE("projects"."description", \'\'), \'test\') * CAST(\'0.2\' AS numeric))'
+ "(#{similarity_function_call}(COALESCE(\"projects\".\"path\", ''), 'test') * CAST('1' AS numeric))",
+ "(#{similarity_function_call}(COALESCE(\"projects\".\"name\", ''), 'test') * CAST('0.7' AS numeric))",
+ "(#{similarity_function_call}(COALESCE(\"projects\".\"description\", ''), 'test') * CAST('0.2' AS numeric))"
].join(' + ')
end
diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
index 89d2ab8bb87..c8432513185 100644
--- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
+++ b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
end
it 'returns a duration in seconds' do
- allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2])
+ allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::GraphqlLogger).to receive(:info)
diff --git a/spec/lib/gitlab/group_search_results_spec.rb b/spec/lib/gitlab/group_search_results_spec.rb
index 045c922783a..009f66d2108 100644
--- a/spec/lib/gitlab/group_search_results_spec.rb
+++ b/spec/lib/gitlab/group_search_results_spec.rb
@@ -17,10 +17,17 @@ RSpec.describe Gitlab::GroupSearchResults do
describe 'issues search' do
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
+ let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
+
let(:query) { 'foo' }
let(:scope) { 'issues' }
+ before do
+ project.add_developer(user)
+ end
+
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by confidential'
end
describe 'merge_requests search' do
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 3126d87a0d6..7c33f29ebf3 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -51,6 +51,7 @@ issues:
- status_page_published_incident
- namespace
- note_authors
+- issue_email_participants
events:
- author
- project
@@ -242,6 +243,7 @@ ci_pipelines:
- latest_builds_report_results
- messages
- pipeline_artifacts
+- latest_statuses
ci_refs:
- project
- ci_pipelines
@@ -536,6 +538,8 @@ project:
- vulnerability_historical_statistics
- product_analytics_events
- pipeline_artifacts
+- terraform_states
+- alert_management_http_integrations
award_emoji:
- awardable
- user
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index 93b6f93f0ec..d084b9d7f7e 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -10,14 +10,17 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
# all items are properly serialized while traversing the simple hash.
subject { Gitlab::Json.parse(Gitlab::Json.generate(described_class.new(project, tree).execute)) }
- let!(:project) { setup_project }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { setup_project }
let(:shared) { project.import_export_shared }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
let(:tree) { reader.project_tree }
- before do
+ before_all do
project.add_maintainer(user)
+ end
+
+ before do
allow_any_instance_of(MergeRequest).to receive(:source_branch_sha).and_return('ABCD')
allow_any_instance_of(MergeRequest).to receive(:target_branch_sha).and_return('DCBA')
end
@@ -224,7 +227,6 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
group: group,
approvals_before_merge: 1
)
- allow(project).to receive(:commit).and_return(Commit.new(RepoHelpers.sample_commit, project))
issue = create(:issue, assignees: [user], project: project)
snippet = create(:project_snippet, project: project)
diff --git a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
index eb9a3fa9bd8..6b2f80cc80a 100644
--- a/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/group/relation_factory_spec.rb
@@ -5,16 +5,19 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
let(:group) { create(:group) }
let(:members_mapper) { double('members_mapper').as_null_object }
- let(:user) { create(:admin) }
+ let(:admin) { create(:admin) }
+ let(:importer_user) { admin }
let(:excluded_keys) { [] }
let(:created_object) do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- members_mapper: members_mapper,
- object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
- user: user,
- importable: group,
- excluded_keys: excluded_keys)
+ described_class.create(
+ relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ members_mapper: members_mapper,
+ object_builder: Gitlab::ImportExport::Group::ObjectBuilder,
+ user: importer_user,
+ importable: group,
+ excluded_keys: excluded_keys
+ )
end
context 'label object' do
@@ -24,18 +27,18 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
let(:relation_hash) do
{
- 'id' => 123456,
- 'title' => 'Bruffefunc',
- 'color' => '#1d2da4',
- 'project_id' => nil,
- 'created_at' => '2019-11-20T17:02:20.546Z',
- 'updated_at' => '2019-11-20T17:02:20.546Z',
- 'template' => false,
+ 'id' => 123456,
+ 'title' => 'Bruffefunc',
+ 'color' => '#1d2da4',
+ 'project_id' => nil,
+ 'created_at' => '2019-11-20T17:02:20.546Z',
+ 'updated_at' => '2019-11-20T17:02:20.546Z',
+ 'template' => false,
'description' => 'Description',
- 'group_id' => original_group_id,
- 'type' => 'GroupLabel',
- 'priorities' => [],
- 'textColor' => '#FFFFFF'
+ 'group_id' => original_group_id,
+ 'type' => 'GroupLabel',
+ 'priorities' => [],
+ 'textColor' => '#FFFFFF'
}
end
@@ -60,58 +63,28 @@ RSpec.describe Gitlab::ImportExport::Group::RelationFactory do
end
end
- context 'Notes user references' do
- let(:relation_sym) { :notes }
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- 'id' => 111,
- 'access_level' => 30,
- 'source_id' => 1,
- 'source_type' => 'Namespace',
- 'user_id' => 3,
- 'notification_level' => 3,
- 'created_at' => '2016-11-18T09:29:42.634Z',
- 'updated_at' => '2016-11-18T09:29:42.634Z',
- 'user' => {
- 'id' => 999,
- 'email' => new_user.email,
- 'username' => new_user.username
- }
- }
- end
-
+ it_behaves_like 'Notes user references' do
+ let(:importable) { group }
let(:relation_hash) do
{
- 'id' => 4947,
- 'note' => 'note',
+ 'id' => 4947,
+ 'note' => 'note',
'noteable_type' => 'Epic',
- 'author_id' => 999,
- 'created_at' => '2016-11-18T09:29:42.634Z',
- 'updated_at' => '2016-11-18T09:29:42.634Z',
- 'project_id' => 1,
- 'attachment' => {
+ 'author_id' => 999,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'project_id' => 1,
+ 'attachment' => {
'url' => nil
},
- 'noteable_id' => 377,
- 'system' => true,
- 'author' => {
+ 'noteable_id' => 377,
+ 'system' => true,
+ 'author' => {
'name' => 'Administrator'
},
'events' => []
}
end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: group)
- end
-
- it 'maps the right author to the imported note' do
- expect(created_object.author).to eq(new_user)
- end
end
def random_id
diff --git a/spec/lib/gitlab/import_export/lfs_saver_spec.rb b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
index db76eb9538b..55b4f7479b8 100644
--- a/spec/lib/gitlab/import_export/lfs_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/lfs_saver_spec.rb
@@ -74,14 +74,6 @@ RSpec.describe Gitlab::ImportExport::LfsSaver do
}
)
end
-
- it 'does not save a json file if feature is disabled' do
- stub_feature_flags(export_lfs_objects_projects: false)
-
- saver.save
-
- expect(File.exist?(lfs_json_file)).to eq(false)
- end
end
end
diff --git a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
index 31cf2362628..50bc6a30044 100644
--- a/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/project/relation_factory_spec.rb
@@ -3,19 +3,22 @@
require 'spec_helper'
RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
- let(:group) { create(:group) }
+ let(:group) { create(:group) }
let(:project) { create(:project, :repository, group: group) }
let(:members_mapper) { double('members_mapper').as_null_object }
- let(:user) { create(:admin) }
+ let(:admin) { create(:admin) }
+ let(:importer_user) { admin }
let(:excluded_keys) { [] }
let(:created_object) do
- described_class.create(relation_sym: relation_sym,
- relation_hash: relation_hash,
- object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
- members_mapper: members_mapper,
- user: user,
- importable: project,
- excluded_keys: excluded_keys)
+ described_class.create(
+ relation_sym: relation_sym,
+ relation_hash: relation_hash,
+ object_builder: Gitlab::ImportExport::Project::ObjectBuilder,
+ members_mapper: members_mapper,
+ user: importer_user,
+ importable: project,
+ excluded_keys: excluded_keys
+ )
end
before do
@@ -113,9 +116,9 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
"created_at" => "2016-11-18T09:29:42.634Z",
"updated_at" => "2016-11-18T09:29:42.634Z",
"user" => {
- "id" => user.id,
- "email" => user.email,
- "username" => user.username
+ "id" => admin.id,
+ "email" => admin.email,
+ "username" => admin.username
}
}
end
@@ -123,7 +126,7 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
let(:members_mapper) do
Gitlab::ImportExport::MembersMapper.new(
exported_members: [exported_member],
- user: user,
+ user: importer_user,
importable: project)
end
@@ -134,9 +137,9 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
'source_branch' => "feature_conflict",
'source_project_id' => project.id,
'target_project_id' => project.id,
- 'author_id' => user.id,
- 'assignee_id' => user.id,
- 'updated_by_id' => user.id,
+ 'author_id' => admin.id,
+ 'assignee_id' => admin.id,
+ 'updated_by_id' => admin.id,
'title' => "MR1",
'created_at' => "2016-06-14T15:02:36.568Z",
'updated_at' => "2016-06-14T15:02:56.815Z",
@@ -151,11 +154,11 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
end
it 'has preloaded author' do
- expect(created_object.author).to equal(user)
+ expect(created_object.author).to equal(admin)
end
it 'has preloaded updated_by' do
- expect(created_object.updated_by).to equal(user)
+ expect(created_object.updated_by).to equal(admin)
end
it 'has preloaded source project' do
@@ -264,27 +267,8 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
end
end
- context 'Notes user references' do
- let(:relation_sym) { :notes }
- let(:new_user) { create(:user) }
- let(:exported_member) do
- {
- "id" => 111,
- "access_level" => 30,
- "source_id" => 1,
- "source_type" => "Project",
- "user_id" => 3,
- "notification_level" => 3,
- "created_at" => "2016-11-18T09:29:42.634Z",
- "updated_at" => "2016-11-18T09:29:42.634Z",
- "user" => {
- "id" => 999,
- "email" => new_user.email,
- "username" => new_user.username
- }
- }
- end
-
+ it_behaves_like 'Notes user references' do
+ let(:importable) { project }
let(:relation_hash) do
{
"id" => 4947,
@@ -305,17 +289,6 @@ RSpec.describe Gitlab::ImportExport::Project::RelationFactory do
"events" => []
}
end
-
- let(:members_mapper) do
- Gitlab::ImportExport::MembersMapper.new(
- exported_members: [exported_member],
- user: user,
- importable: project)
- end
-
- it 'maps the right author to the imported note' do
- expect(created_object.author).to eq(new_user)
- end
end
context 'encrypted attributes' do
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index 7aa0a3485fb..a9edb2b530b 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -2,23 +2,26 @@
require 'spec_helper'
-RSpec.describe Gitlab::JobWaiter do
+RSpec.describe Gitlab::JobWaiter, :redis do
describe '.notify' do
it 'pushes the jid to the named queue' do
- key = 'gitlab:job_waiter:foo'
- jid = 1
+ key = described_class.new.key
- redis = double('redis')
- expect(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis)
- expect(redis).to receive(:lpush).with(key, jid)
+ described_class.notify(key, 123)
- described_class.notify(key, jid)
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(key)).to be > 0
+ end
end
end
describe '#wait' do
let(:waiter) { described_class.new(2) }
+ before do
+ allow_any_instance_of(described_class).to receive(:wait).and_call_original
+ end
+
it 'returns when all jobs have been completed' do
described_class.notify(waiter.key, 'a')
described_class.notify(waiter.key, 'b')
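
For context on the rewritten spec above, which now exercises a real Redis rather than a stubbed connection, a hypothetical usage sketch of Gitlab::JobWaiter follows; the timeout argument to #wait is an assumption for illustration, not taken from this diff:

waiter = Gitlab::JobWaiter.new(2)             # expect two jobs to report back
Gitlab::JobWaiter.notify(waiter.key, 'jid-1') # each worker pushes its jid onto the waiter's key
Gitlab::JobWaiter.notify(waiter.key, 'jid-2')
finished = waiter.wait(10)                    # assumed: blocks until both jids arrive or the timeout elapses
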
diff --git a/spec/lib/gitlab/lfs/client_spec.rb b/spec/lib/gitlab/lfs/client_spec.rb
index 03563a632d6..14af9d02cb5 100644
--- a/spec/lib/gitlab/lfs/client_spec.rb
+++ b/spec/lib/gitlab/lfs/client_spec.rb
@@ -7,6 +7,8 @@ RSpec.describe Gitlab::Lfs::Client do
let(:username) { 'user' }
let(:password) { 'password' }
let(:credentials) { { user: username, password: password, auth_method: 'password' } }
+ let(:git_lfs_content_type) { 'application/vnd.git-lfs+json' }
+ let(:git_lfs_user_agent) { "GitLab #{Gitlab::VERSION} LFS client" }
let(:basic_auth_headers) do
{ 'Authorization' => "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
@@ -21,6 +23,15 @@ RSpec.describe Gitlab::Lfs::Client do
}
end
+ let(:verify_action) do
+ {
+ "href" => "#{base_url}/some/file/verify",
+ "header" => {
+ "Key" => "value"
+ }
+ }
+ end
+
subject(:lfs_client) { described_class.new(base_url, credentials: credentials) }
describe '#batch' do
@@ -34,10 +45,10 @@ RSpec.describe Gitlab::Lfs::Client do
).to_return(
status: 200,
body: { 'objects' => 'anything', 'transfer' => 'basic' }.to_json,
- headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
+ headers: { 'Content-Type' => git_lfs_content_type }
)
- result = lfs_client.batch('upload', objects)
+ result = lfs_client.batch!('upload', objects)
expect(stub).to have_been_requested
expect(result).to eq('objects' => 'anything', 'transfer' => 'basic')
@@ -48,7 +59,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
- expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
+ expect { lfs_client.batch!('upload', objects) }.to raise_error(/Failed/)
end
end
@@ -56,7 +67,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_batch(objects: objects, headers: basic_auth_headers).to_return(status: 400)
- expect { lfs_client.batch('upload', objects) }.to raise_error(/Failed/)
+ expect { lfs_client.batch!('upload', objects) }.to raise_error(/Failed/)
end
end
@@ -68,17 +79,23 @@ RSpec.describe Gitlab::Lfs::Client do
).to_return(
status: 200,
body: { 'transfer' => 'carrier-pigeon' }.to_json,
- headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
+ headers: { 'Content-Type' => git_lfs_content_type }
)
- expect { lfs_client.batch('upload', objects) }.to raise_error(/Unsupported transfer/)
+ expect { lfs_client.batch!('upload', objects) }.to raise_error(/Unsupported transfer/)
end
end
def stub_batch(objects:, headers:, operation: 'upload', transfer: 'basic')
- objects = objects.map { |o| { oid: o.oid, size: o.size } }
+ objects = objects.as_json(only: [:oid, :size])
body = { operation: operation, 'transfers': [transfer], objects: objects }.to_json
+ headers = {
+ 'Accept' => git_lfs_content_type,
+ 'Content-Type' => git_lfs_content_type,
+ 'User-Agent' => git_lfs_user_agent
+ }.merge(headers)
+
stub_request(:post, base_url + '/info/lfs/objects/batch').with(body: body, headers: headers)
end
end
@@ -90,7 +107,7 @@ RSpec.describe Gitlab::Lfs::Client do
it "makes an HTTP PUT with expected parameters" do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 200)
- lfs_client.upload(object, upload_action, authenticated: true)
+ lfs_client.upload!(object, upload_action, authenticated: true)
end
end
@@ -101,7 +118,7 @@ RSpec.describe Gitlab::Lfs::Client do
headers: basic_auth_headers.merge(upload_action['header'])
).to_return(status: 200)
- lfs_client.upload(object, upload_action, authenticated: false)
+ lfs_client.upload!(object, upload_action, authenticated: false)
expect(stub).to have_been_requested
end
@@ -110,13 +127,13 @@ RSpec.describe Gitlab::Lfs::Client do
context 'LFS object has no file' do
let(:object) { LfsObject.new }
- it 'makes an HJTT PUT with expected parameters' do
+ it 'makes an HTTP PUT with expected parameters' do
stub = stub_upload(
object: object,
headers: upload_action['header']
).to_return(status: 200)
- lfs_client.upload(object, upload_action, authenticated: true)
+ lfs_client.upload!(object, upload_action, authenticated: true)
expect(stub).to have_been_requested
end
@@ -126,7 +143,7 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 400)
- expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
end
end
@@ -134,15 +151,75 @@ RSpec.describe Gitlab::Lfs::Client do
it 'raises an error' do
stub_upload(object: object, headers: upload_action['header']).to_return(status: 500)
- expect { lfs_client.upload(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
+ expect { lfs_client.upload!(object, upload_action, authenticated: true) }.to raise_error(/Failed/)
end
end
def stub_upload(object:, headers:)
+ headers = {
+ 'Content-Type' => 'application/octet-stream',
+ 'Content-Length' => object.size.to_s,
+ 'User-Agent' => git_lfs_user_agent
+ }.merge(headers)
+
stub_request(:put, upload_action['href']).with(
body: object.file.read,
headers: headers.merge('Content-Length' => object.size.to_s)
)
end
end
+
+ describe "#verify" do
+ let_it_be(:object) { create(:lfs_object) }
+
+ context 'server returns 200 OK to an authenticated request' do
+ it "makes an HTTP POST with expected parameters" do
+ stub_verify(object: object, headers: verify_action['header']).to_return(status: 200)
+
+ lfs_client.verify!(object, verify_action, authenticated: true)
+ end
+ end
+
+ context 'server returns 200 OK to an unauthenticated request' do
+ it "makes an HTTP POST with expected parameters" do
+ stub = stub_verify(
+ object: object,
+ headers: basic_auth_headers.merge(upload_action['header'])
+ ).to_return(status: 200)
+
+ lfs_client.verify!(object, verify_action, authenticated: false)
+
+ expect(stub).to have_been_requested
+ end
+ end
+
+ context 'server returns 400 error' do
+ it 'raises an error' do
+ stub_verify(object: object, headers: verify_action['header']).to_return(status: 400)
+
+ expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
+ end
+ end
+
+ context 'server returns 500 error' do
+ it 'raises an error' do
+ stub_verify(object: object, headers: verify_action['header']).to_return(status: 500)
+
+ expect { lfs_client.verify!(object, verify_action, authenticated: true) }.to raise_error(/Failed/)
+ end
+ end
+
+ def stub_verify(object:, headers:)
+ headers = {
+ 'Accept' => git_lfs_content_type,
+ 'Content-Type' => git_lfs_content_type,
+ 'User-Agent' => git_lfs_user_agent
+ }.merge(headers)
+
+ stub_request(:post, verify_action['href']).with(
+ body: object.to_json(only: [:oid, :size]),
+ headers: headers
+ )
+ end
+ end
end
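
The new stub_verify helper follows the same WebMock pattern as the existing stub_batch and stub_upload helpers. A standalone sketch of that pattern, with a placeholder URL, body, and headers:

# Sketch only; URL, body and headers are illustrative.
stub = stub_request(:post, 'https://git.example.com/some/file/verify')
  .with(
    body: { oid: 'abc123', size: 42 }.to_json,
    headers: { 'Content-Type' => 'application/vnd.git-lfs+json' }
  )
  .to_return(status: 200)

# ... exercise the client under test ...

expect(stub).to have_been_requested
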
diff --git a/spec/lib/gitlab/lfs_token_spec.rb b/spec/lib/gitlab/lfs_token_spec.rb
index 9b8b2c1417a..4b40e8960b2 100644
--- a/spec/lib/gitlab/lfs_token_spec.rb
+++ b/spec/lib/gitlab/lfs_token_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::LfsToken, :clean_gitlab_redis_shared_state do
# Needs to be at least LfsToken::DEFAULT_EXPIRE_TIME + 60 seconds
# in order to check whether it is valid 1 minute after it has expired
- Timecop.freeze(Time.now + described_class::DEFAULT_EXPIRE_TIME + 60) do
+ travel_to(Time.now + described_class::DEFAULT_EXPIRE_TIME + 60) do
expect(lfs_token.token_valid?(expired_token)).to be false
end
end
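
The Timecop.freeze call is replaced with travel_to. A minimal sketch of that helper, assuming ActiveSupport::Testing::TimeHelpers is included (as it must be for the spec above to call travel_to):

include ActiveSupport::Testing::TimeHelpers

travel_to(Time.zone.local(2008, 9, 1, 12, 0, 0)) do
  # Inside the block the clock is pinned to the travelled time.
  expect(Time.zone.now).to eq(Time.zone.local(2008, 9, 1, 12, 0, 0))
end
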
diff --git a/spec/lib/gitlab/manifest_import/manifest_spec.rb b/spec/lib/gitlab/manifest_import/manifest_spec.rb
index 2e8753b0880..352120c079d 100644
--- a/spec/lib/gitlab/manifest_import/manifest_spec.rb
+++ b/spec/lib/gitlab/manifest_import/manifest_spec.rb
@@ -12,19 +12,7 @@ RSpec.describe Gitlab::ManifestImport::Manifest do
end
context 'missing or invalid attributes' do
- let(:file) { Tempfile.new('foo') }
-
- before do
- content = <<~EOS
- <manifest>
- <remote review="invalid-url" />
- <project name="platform/build"/>
- </manifest>
- EOS
-
- file.write(content)
- file.rewind
- end
+ let(:file) { File.open(Rails.root.join('spec/fixtures/invalid_manifest.xml')) }
it { expect(manifest.valid?).to be false }
diff --git a/spec/lib/gitlab/manifest_import/metadata_spec.rb b/spec/lib/gitlab/manifest_import/metadata_spec.rb
new file mode 100644
index 00000000000..c8158d3e148
--- /dev/null
+++ b/spec/lib/gitlab/manifest_import/metadata_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ManifestImport::Metadata, :clean_gitlab_redis_shared_state do
+ let(:user) { double(id: 1) }
+ let(:repositories) do
+ [
+ { id: 'test1', url: 'http://demo.host/test1' },
+ { id: 'test2', url: 'http://demo.host/test2' }
+ ]
+ end
+
+ describe '#save' do
+ it 'stores data in Redis with an expiry of EXPIRY_TIME' do
+ status = described_class.new(user)
+ repositories_key = 'manifest_import:metadata:user:1:repositories'
+ group_id_key = 'manifest_import:metadata:user:1:group_id'
+
+ status.save(repositories, 2)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.ttl(repositories_key)).to be_within(5).of(described_class::EXPIRY_TIME)
+ expect(redis.ttl(group_id_key)).to be_within(5).of(described_class::EXPIRY_TIME)
+ end
+ end
+ end
+
+ describe '#repositories' do
+ it 'allows repositories to round-trip with symbol keys' do
+ status = described_class.new(user)
+
+ status.save(repositories, 2)
+
+ expect(status.repositories).to eq(repositories)
+ end
+
+ it 'uses the fallback when there is nothing in Redis' do
+ fallback = { manifest_import_repositories: repositories }
+ status = described_class.new(user, fallback: fallback)
+
+ expect(status.repositories).to eq(repositories)
+ end
+ end
+
+ describe '#group_id' do
+ it 'returns the group ID as an integer' do
+ status = described_class.new(user)
+
+ status.save(repositories, 2)
+
+ expect(status.group_id).to eq(2)
+ end
+
+ it 'uses the fallback when there is nothing in Redis' do
+ fallback = { manifest_import_group_id: 3 }
+ status = described_class.new(user, fallback: fallback)
+
+ expect(status.group_id).to eq(3)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
index 09d5e048f6a..ff8f5797f9d 100644
--- a/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/importers/prometheus_metrics_spec.rb
@@ -8,9 +8,16 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
describe '#execute' do
let(:project) { create(:project) }
let(:dashboard_path) { 'path/to/dashboard.yml' }
+ let(:prometheus_adapter) { double('adapter', clear_prometheus_reactive_cache!: nil) }
subject { described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path) }
+ before do
+ allow_next_instance_of(::Clusters::Applications::ScheduleUpdateService) do |update_service|
+ allow(update_service).to receive(:execute)
+ end
+ end
+
context 'valid dashboard' do
let(:dashboard_hash) { load_sample_dashboard }
@@ -21,20 +28,32 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
end
context 'with existing metrics' do
+ let(:existing_metric_attributes) do
+ {
+ project: project,
+ identifier: 'metric_b',
+ title: 'overwrite',
+ y_label: 'overwrite',
+ query: 'overwrite',
+ unit: 'overwrite',
+ legend: 'overwrite',
+ dashboard_path: dashboard_path
+ }
+ end
+
let!(:existing_metric) do
- create(:prometheus_metric, {
- project: project,
- identifier: 'metric_b',
- title: 'overwrite',
- y_label: 'overwrite',
- query: 'overwrite',
- unit: 'overwrite',
- legend: 'overwrite'
- })
+ create(:prometheus_metric, existing_metric_attributes)
+ end
+
+ let!(:existing_alert) do
+ alert = create(:prometheus_alert, project: project, prometheus_metric: existing_metric)
+ existing_metric.prometheus_alerts << alert
+
+ alert
end
it 'updates existing PrometheusMetrics' do
- described_class.new(dashboard_hash, project: project, dashboard_path: dashboard_path).execute
+ subject.execute
expect(existing_metric.reload.attributes.with_indifferent_access).to include({
title: 'Super Chart B',
@@ -49,6 +68,15 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
expect { subject.execute }.to change { PrometheusMetric.count }.by(2)
end
+ it 'updates affected environments' do
+ expect(::Clusters::Applications::ScheduleUpdateService).to receive(:new).with(
+ existing_alert.environment.cluster_prometheus_adapter,
+ project
+ ).and_return(double('ScheduleUpdateService', execute: true))
+
+ subject.execute
+ end
+
context 'with stale metrics' do
let!(:stale_metric) do
create(:prometheus_metric,
@@ -59,11 +87,45 @@ RSpec.describe Gitlab::Metrics::Dashboard::Importers::PrometheusMetrics do
)
end
+ let!(:stale_alert) do
+ alert = create(:prometheus_alert, project: project, prometheus_metric: stale_metric)
+ stale_metric.prometheus_alerts << alert
+
+ alert
+ end
+
+ it 'updates existing PrometheusMetrics' do
+ subject.execute
+
+ expect(existing_metric.reload.attributes.with_indifferent_access).to include({
+ title: 'Super Chart B',
+ y_label: 'y_label',
+ query: 'query',
+ unit: 'unit',
+ legend: 'Legend Label'
+ })
+ end
+
it 'deletes stale metrics' do
subject.execute
expect { stale_metric.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
+
+ it 'deletes stale alert' do
+ subject.execute
+
+ expect { stale_alert.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'updates affected environments' do
+ expect(::Clusters::Applications::ScheduleUpdateService).to receive(:new).with(
+ existing_alert.environment.cluster_prometheus_adapter,
+ project
+ ).and_return(double('ScheduleUpdateService', execute: true))
+
+ subject.execute
+ end
end
end
end
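
The new before block relies on GitLab's allow_next_instance_of helper. A short sketch of what it does, reusing the same service class as the hunk above:

# Stubs whichever instance of the class is instantiated next, so the
# importer's internal ScheduleUpdateService.new(...).execute call becomes a no-op.
allow_next_instance_of(::Clusters::Applications::ScheduleUpdateService) do |service|
  allow(service).to receive(:execute)
end
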
diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
index 69b779d36eb..0c77dc540f3 100644
--- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb
@@ -38,69 +38,49 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware do
end
context 'request is a health check endpoint' do
- it 'increments health endpoint counter' do
- env['PATH_INFO'] = '/-/liveness'
+ ['/-/liveness', '/-/liveness/', '/-/%6D%65%74%72%69%63%73'].each do |path|
+ context "when path is #{path}" do
+ before do
+ env['PATH_INFO'] = path
+ end
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
+ it 'increments health endpoint counter rather than overall counter' do
+ expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
+ expect(described_class).not_to receive(:http_request_total)
- subject.call(env)
- end
-
- context 'with trailing slash' do
- before do
- env['PATH_INFO'] = '/-/liveness/'
- end
+ subject.call(env)
+ end
- it 'increments health endpoint counter' do
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
+ it 'does not record the request duration' do
+ expect(described_class).not_to receive(:http_request_duration_seconds)
- subject.call(env)
- end
- end
-
- context 'with percent encoded values' do
- before do
- env['PATH_INFO'] = '/-/%6D%65%74%72%69%63%73' # /-/metrics
- end
-
- it 'increments health endpoint counter' do
- expect(described_class).to receive_message_chain(:http_health_requests_total, :increment).with(method: 'get')
-
- subject.call(env)
+ subject.call(env)
+ end
end
end
end
context 'request is not a health check endpoint' do
- it 'does not increment health endpoint counter' do
- env['PATH_INFO'] = '/-/ordinary-requests'
-
- expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
- end
-
- context 'path info is a root path' do
- before do
- env['PATH_INFO'] = '/-/'
- end
-
- it 'does not increment health endpoint counter' do
- expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
- end
- end
-
- context 'path info is a subpath' do
- before do
- env['PATH_INFO'] = '/-/health/subpath'
- end
-
- it 'does not increment health endpoint counter' do
- expect(described_class).not_to receive(:http_health_requests_total)
-
- subject.call(env)
+ ['/-/ordinary-requests', '/-/', '/-/health/subpath'].each do |path|
+ context "when path is #{path}" do
+ before do
+ env['PATH_INFO'] = path
+ end
+
+ it 'increments overall counter rather than health endpoint counter' do
+ expect(described_class).to receive_message_chain(:http_request_total, :increment).with(method: 'get')
+ expect(described_class).not_to receive(:http_health_requests_total)
+
+ subject.call(env)
+ end
+
+ it 'records the request duration' do
+ expect(described_class)
+ .to receive_message_chain(:http_request_duration_seconds, :observe)
+ .with({ method: 'get', status: '200' }, a_positive_execution_time)
+
+ subject.call(env)
+ end
end
end
end
diff --git a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
index cdb48024531..a9dae72f4db 100644
--- a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
+++ b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Middleware::RailsQueueDuration do
expect(transaction).to receive(:observe).with(:gitlab_rails_queue_duration_seconds, 1)
- Timecop.freeze(Time.at(3)) do
+ travel_to(Time.at(3)) do
expect(middleware.call(env)).to eq('yay')
end
end
diff --git a/spec/lib/gitlab/pagination/offset_pagination_spec.rb b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
index be20f0194f7..c9a23170137 100644
--- a/spec/lib/gitlab/pagination/offset_pagination_spec.rb
+++ b/spec/lib/gitlab/pagination/offset_pagination_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
let(:request_context) { double("request_context") }
- subject do
+ subject(:paginator) do
described_class.new(request_context)
end
@@ -119,6 +119,34 @@ RSpec.describe Gitlab::Pagination::OffsetPagination do
subject.paginate(resource)
end
end
+
+ it 'does not return the total headers when excluding them' do
+ expect_no_header('X-Total')
+ expect_no_header('X-Total-Pages')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+
+ paginator.paginate(resource, exclude_total_headers: true)
+ end
+ end
+
+ context 'when resource is a paginatable array' do
+ let(:resource) { Kaminari.paginate_array(Project.all.to_a) }
+
+ it_behaves_like 'response with pagination headers'
+
+ it 'only returns the requested resources' do
+ expect(paginator.paginate(resource).count).to eq(2)
+ end
+
+ it 'does not return total headers when excluding them' do
+ expect_no_header('X-Total')
+ expect_no_header('X-Total-Pages')
+ expect_header('X-Per-Page', '2')
+ expect_header('X-Page', '1')
+
+ paginator.paginate(resource, exclude_total_headers: true)
+ end
end
end
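
The added context feeds the paginator a Kaminari-wrapped array instead of an ActiveRecord relation. A sketch of the underlying Kaminari API (the page size is illustrative):

array    = Project.all.to_a
resource = Kaminari.paginate_array(array)  # wraps a plain Array with pagination behaviour
page     = resource.page(1).per(2)         # responds like a paginated relation
page.size        # => at most 2
page.total_count # => array.size
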
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index fe0735b8043..a76ad1f6f4c 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -265,9 +265,15 @@ RSpec.describe Gitlab::ProjectSearchResults do
let_it_be(:project) { create(:project, :public) }
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo opened') }
+ let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
let(:query) { 'foo' }
+ before do
+ project.add_developer(user)
+ end
+
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by confidential'
end
end
diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb
index fa45c605b1b..98bd2efdbc6 100644
--- a/spec/lib/gitlab/project_template_spec.rb
+++ b/spec/lib/gitlab/project_template_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe Gitlab::ProjectTemplate do
expected = %w[
rails spring express iosswift dotnetcore android
gomicro gatsby hugo jekyll plainhtml gitbook
- hexo sse_middleman nfhugo nfjekyll nfplainhtml
- nfgitbook nfhexo salesforcedx serverless_framework
- jsonnet cluster_management
+ hexo sse_middleman gitpod_spring_petclinic nfhugo
+ nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx
+ serverless_framework jsonnet cluster_management
]
expect(described_class.all).to be_an(Array)
diff --git a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
index 8abc944eeb1..b2350eff9f9 100644
--- a/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/additional_metrics_deployment_query_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::Queries::AdditionalMetricsDeploymentQuery do
around do |example|
- Timecop.freeze(Time.local(2008, 9, 1, 12, 0, 0)) { example.run }
+ travel_to(Time.local(2008, 9, 1, 12, 0, 0)) { example.run }
end
include_examples 'additional metrics query' do
diff --git a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
index 4683c4eae28..66b93d0dd72 100644
--- a/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/deployment_query_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Prometheus::Queries::DeploymentQuery do
around do |example|
time_without_subsecond_values = Time.local(2008, 9, 1, 12, 0, 0)
- Timecop.freeze(time_without_subsecond_values) { example.run }
+ travel_to(time_without_subsecond_values) { example.run }
end
it 'sends appropriate queries to prometheus' do
diff --git a/spec/lib/gitlab/prometheus/query_variables_spec.rb b/spec/lib/gitlab/prometheus/query_variables_spec.rb
index 1422d48152a..1dbdb892a5d 100644
--- a/spec/lib/gitlab/prometheus/query_variables_spec.rb
+++ b/spec/lib/gitlab/prometheus/query_variables_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe Gitlab::Prometheus::QueryVariables do
describe '.call' do
+ let_it_be_with_refind(:environment) { create(:environment) }
let(:project) { environment.project }
- let(:environment) { create(:environment) }
let(:slug) { environment.slug }
let(:params) { {} }
- subject { described_class.call(environment, params) }
+ subject { described_class.call(environment, **params) }
it { is_expected.to include(ci_environment_slug: slug) }
it { is_expected.to include(ci_project_name: project.name) }
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 88c3315150b..704a4e7b224 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -99,6 +99,36 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('foo-') }
end
+ describe '.build_trace_section_regex' do
+ subject { described_class.build_trace_section_regex }
+
+ context 'without options' do
+ example = "section_start:1600445393032:NAME\r\033\[0K"
+
+ it { is_expected.to match(example) }
+ it { is_expected.to match("section_end:12345678:aBcDeFg1234\r\033\[0K") }
+ it { is_expected.to match("section_start:0:sect_for_alpha-v1.0\r\033\[0K") }
+ it { is_expected.not_to match("section_start:section:0\r\033\[0K") }
+ it { is_expected.not_to match("section_:1600445393032:NAME\r\033\[0K") }
+ it { is_expected.not_to match(example.upcase) }
+ end
+
+ context 'with options' do
+ it { is_expected.to match("section_start:1600445393032:NAME[collapsed=true]\r\033\[0K") }
+ it { is_expected.to match("section_start:1600445393032:NAME[collapsed=true, example_option=false]\r\033\[0K") }
+ it { is_expected.to match("section_start:1600445393032:NAME[collapsed=true,example_option=false]\r\033\[0K") }
+ it { is_expected.to match("section_start:1600445393032:NAME[numeric_option=1234567]\r\033\[0K") }
+ # Unless the regex is split into separate start and end patterns,
+ # options are also matched after section_end; they are simply ignored there.
+ it { is_expected.to match("section_end:1600445393032:NAME[collapsed=true]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed=[]]]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed = true]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed = true, example_option=false]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[collapsed=true, example_option=false]\r\033\[0K") }
+ it { is_expected.not_to match("section_start:1600445393032:NAME[]\r\033\[0K") }
+ end
+ end
+
describe '.container_repository_name_regex' do
subject { described_class.container_repository_name_regex }
@@ -384,6 +414,140 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
+ describe '.debian_package_name_regex' do
+ subject { described_class.debian_package_name_regex }
+
+ it { is_expected.to match('0ad') }
+ it { is_expected.to match('g++') }
+ it { is_expected.to match('lua5.1') }
+ it { is_expected.to match('samba') }
+
+ # may not be empty string
+ it { is_expected.not_to match('') }
+ # must start with an alphanumeric character
+ it { is_expected.not_to match('-a') }
+ it { is_expected.not_to match('+a') }
+ it { is_expected.not_to match('.a') }
+ it { is_expected.not_to match('_a') }
+ # only letters, digits and characters '-+._'
+ it { is_expected.not_to match('a~') }
+ it { is_expected.not_to match('aé') }
+
+ # More strict Lintian regex
+ # at least 2 chars
+ it { is_expected.not_to match('a') }
+ # lowercase only
+ it { is_expected.not_to match('Aa') }
+ it { is_expected.not_to match('aA') }
+ # No underscore
+ it { is_expected.not_to match('a_b') }
+ end
+
+ describe '.debian_version_regex' do
+ subject { described_class.debian_version_regex }
+
+ context 'valid versions' do
+ it { is_expected.to match('1.0') }
+ it { is_expected.to match('1.0~alpha1') }
+ it { is_expected.to match('2:4.9.5+dfsg-5+deb10u1') }
+ end
+
+ context 'dpkg errors' do
+ # version string is empty
+ it { is_expected.not_to match('') }
+ # version string has embedded spaces
+ it { is_expected.not_to match('1 0') }
+ # epoch in version is empty
+ it { is_expected.not_to match(':1.0') }
+ # epoch in version is not number
+ it { is_expected.not_to match('a:1.0') }
+ # epoch in version is negative
+ it { is_expected.not_to match('-1:1.0') }
+ # epoch in version is too big
+ it { is_expected.not_to match('9999999999:1.0') }
+ # nothing after colon in version number
+ it { is_expected.not_to match('2:') }
+ # revision number is empty
+ # Note: we are less strict here
+ # it { is_expected.not_to match('1.0-') }
+ # version number is empty
+ it { is_expected.not_to match('-1') }
+ it { is_expected.not_to match('2:-1') }
+ end
+
+ context 'dpkg warnings' do
+ # version number does not start with digit
+ it { is_expected.not_to match('a') }
+ it { is_expected.not_to match('a1.0') }
+ # invalid character in version number
+ it { is_expected.not_to match('1_0') }
+ # invalid character in revision number
+ it { is_expected.not_to match('1.0-1_0') }
+ end
+
+ context 'dpkg accepts' do
+ # dpkg accepts leading or trailing space
+ it { is_expected.not_to match(' 1.0') }
+ it { is_expected.not_to match('1.0 ') }
+ # dpkg accepts multiple colons
+ it { is_expected.not_to match('1:2:3') }
+ end
+ end
+
+ describe '.debian_architecture_regex' do
+ subject { described_class.debian_architecture_regex }
+
+ it { is_expected.to match('amd64') }
+ it { is_expected.to match('kfreebsd-i386') }
+
+ # may not be empty string
+ it { is_expected.not_to match('') }
+ # must start with an alphanumeric
+ it { is_expected.not_to match('-a') }
+ it { is_expected.not_to match('+a') }
+ it { is_expected.not_to match('.a') }
+ it { is_expected.not_to match('_a') }
+ # only letters, digits and characters '-'
+ it { is_expected.not_to match('a+b') }
+ it { is_expected.not_to match('a.b') }
+ it { is_expected.not_to match('a_b') }
+ it { is_expected.not_to match('a~') }
+ it { is_expected.not_to match('aé') }
+
+ # More strict
+ # Enforce lowercase
+ it { is_expected.not_to match('AMD64') }
+ it { is_expected.not_to match('Amd64') }
+ it { is_expected.not_to match('aMD64') }
+ end
+
+ describe '.debian_distribution_regex' do
+ subject { described_class.debian_distribution_regex }
+
+ it { is_expected.to match('buster') }
+ it { is_expected.to match('buster-updates') }
+ it { is_expected.to match('Debian10.5') }
+
+ # Do not allow slash, even if this exists in the wild
+ it { is_expected.not_to match('jessie/updates') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
+ describe '.debian_component_regex' do
+ subject { described_class.debian_component_regex }
+
+ it { is_expected.to match('main') }
+ it { is_expected.to match('non-free') }
+
+ # Do not allow slash
+ it { is_expected.not_to match('non/free') }
+
+ # Do not allow Unicode
+ it { is_expected.not_to match('hé') }
+ end
+
describe '.semver_regex' do
subject { described_class.semver_regex }
@@ -434,4 +598,45 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('%2e%2e%2f1.2.3') }
it { is_expected.not_to match('') }
end
+
+ describe '.generic_package_name_regex' do
+ subject { described_class.generic_package_name_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo.bar.baz-2.0-20190901.47283-1') }
+ it { is_expected.not_to match('../../foo') }
+ it { is_expected.not_to match('..\..\foo') }
+ it { is_expected.not_to match('%2f%2e%2e%2f%2essh%2fauthorized_keys') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('my file name') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.generic_package_file_name_regex' do
+ subject { described_class.generic_package_file_name_regex }
+
+ it { is_expected.to match('123') }
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo.bar.baz-2.0-20190901.47283-1.jar') }
+ it { is_expected.not_to match('../../foo') }
+ it { is_expected.not_to match('..\..\foo') }
+ it { is_expected.not_to match('%2f%2e%2e%2f%2essh%2fauthorized_keys') }
+ it { is_expected.not_to match('$foo/bar') }
+ it { is_expected.not_to match('my file name') }
+ it { is_expected.not_to match('!!()()') }
+ end
+
+ describe '.prefixed_semver_regex' do
+ subject { described_class.prefixed_semver_regex }
+
+ it { is_expected.to match('v1.2.3') }
+ it { is_expected.to match('v1.2.3-beta') }
+ it { is_expected.to match('v1.2.3-alpha.3') }
+ it { is_expected.not_to match('v1') }
+ it { is_expected.not_to match('v1.2') }
+ it { is_expected.not_to match('v1./2.3') }
+ it { is_expected.not_to match('v../../../../../1.2.3') }
+ it { is_expected.not_to match('v%2e%2e%2f1.2.3') }
+ end
end
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index b4cf6a568b4..cdb626778a8 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -11,9 +11,11 @@ RSpec.describe Gitlab::SearchResults do
let_it_be(:issue) { create(:issue, project: project, title: 'foo') }
let_it_be(:milestone) { create(:milestone, project: project, title: 'foo') }
let(:merge_request) { create(:merge_request, source_project: project, title: 'foo') }
+ let(:query) { 'foo' }
let(:filters) { {} }
+ let(:sort) { nil }
- subject(:results) { described_class.new(user, 'foo', Project.order(:id), filters: filters) }
+ subject(:results) { described_class.new(user, query, Project.order(:id), sort: sort, filters: filters) }
context 'as a user with access' do
before do
@@ -137,10 +139,12 @@ RSpec.describe Gitlab::SearchResults do
end
describe '#merge_requests' do
+ let(:scope) { 'merge_requests' }
+
it 'includes project filter by default' do
expect(results).to receive(:project_ids_relation).and_call_original
- results.objects('merge_requests')
+ results.objects(scope)
end
it 'skips project filter if default project context is used' do
@@ -148,24 +152,34 @@ RSpec.describe Gitlab::SearchResults do
expect(results).not_to receive(:project_ids_relation)
- results.objects('merge_requests')
+ results.objects(scope)
end
context 'filtering' do
let!(:opened_result) { create(:merge_request, :opened, source_project: project, title: 'foo opened') }
let!(:closed_result) { create(:merge_request, :closed, source_project: project, title: 'foo closed') }
- let(:scope) { 'merge_requests' }
let(:query) { 'foo' }
include_examples 'search results filtered by state'
end
+
+ context 'ordering' do
+ let(:query) { 'sorted' }
+ let!(:old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'old-1', title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:merge_request, :opened, source_project: project, source_branch: 'new-1', title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:merge_request, :opened, source_project: project, source_branch: 'very-old-1', title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted'
+ end
end
describe '#issues' do
+ let(:scope) { 'issues' }
+
it 'includes project filter by default' do
expect(results).to receive(:project_ids_relation).and_call_original
- results.objects('issues')
+ results.objects(scope)
end
it 'skips project filter if default project context is used' do
@@ -173,16 +187,25 @@ RSpec.describe Gitlab::SearchResults do
expect(results).not_to receive(:project_ids_relation)
- results.objects('issues')
+ results.objects(scope)
end
context 'filtering' do
- let(:scope) { 'issues' }
-
let_it_be(:closed_result) { create(:issue, :closed, project: project, title: 'foo closed') }
let_it_be(:opened_result) { create(:issue, :opened, project: project, title: 'foo open') }
+ let_it_be(:confidential_result) { create(:issue, :confidential, project: project, title: 'foo confidential') }
include_examples 'search results filtered by state'
+ include_examples 'search results filtered by confidential'
+ end
+
+ context 'ordering' do
+ let(:query) { 'sorted' }
+ let!(:old_result) { create(:issue, project: project, title: 'sorted old', created_at: 1.month.ago) }
+ let!(:new_result) { create(:issue, project: project, title: 'sorted recent', created_at: 1.day.ago) }
+ let!(:very_old_result) { create(:issue, project: project, title: 'sorted very old', created_at: 1.year.ago) }
+
+ include_examples 'search results sorted'
end
end
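
Several of the hunks above pull in the shared examples 'search results filtered by confidential' and 'search results sorted'. A hypothetical sketch of how such a shared example is declared and consumed; the let names mirror the ones set up in the contexts above, but the body is illustrative rather than the actual shared example:

RSpec.shared_examples 'search results filtered by confidential' do
  it 'hides confidential results from users without access' do
    expect(results.objects(scope)).not_to include(confidential_result)
  end
end

# The including spec must already define `results`, `scope` and
# `confidential_result`, then simply:
include_examples 'search results filtered by confidential'
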
diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb
index 220ac2ff6da..9bf6f0b82bc 100644
--- a/spec/lib/gitlab/sql/pattern_spec.rb
+++ b/spec/lib/gitlab/sql/pattern_spec.rb
@@ -3,6 +3,43 @@
require 'spec_helper'
RSpec.describe Gitlab::SQL::Pattern do
+ using RSpec::Parameterized::TableSyntax
+
+ describe '.fuzzy_search' do
+ let_it_be(:issue1) { create(:issue, title: 'noise foo noise', description: 'noise bar noise') }
+ let_it_be(:issue2) { create(:issue, title: 'noise baz noise', description: 'noise foo noise') }
+ let_it_be(:issue3) { create(:issue, title: 'Oh', description: 'Ah') }
+
+ subject(:fuzzy_search) { Issue.fuzzy_search(query, columns) }
+
+ where(:query, :columns, :expected) do
+ 'foo' | [Issue.arel_table[:title]] | %i[issue1]
+
+ 'foo' | %i[title] | %i[issue1]
+ 'foo' | %w[title] | %i[issue1]
+ 'foo' | %i[description] | %i[issue2]
+ 'foo' | %i[title description] | %i[issue1 issue2]
+ 'bar' | %i[title description] | %i[issue1]
+ 'baz' | %i[title description] | %i[issue2]
+ 'qux' | %i[title description] | []
+
+ 'oh' | %i[title description] | %i[issue3]
+ 'OH' | %i[title description] | %i[issue3]
+ 'ah' | %i[title description] | %i[issue3]
+ 'AH' | %i[title description] | %i[issue3]
+ 'oh' | %i[title] | %i[issue3]
+ 'ah' | %i[description] | %i[issue3]
+ end
+
+ with_them do
+ let(:expected_issues) { expected.map { |sym| send(sym) } }
+
+ it 'finds the expected issues' do
+ expect(fuzzy_search).to match_array(expected_issues)
+ end
+ end
+ end
+
describe '.to_pattern' do
subject(:to_pattern) { User.to_pattern(query) }
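
The new .fuzzy_search block uses the table syntax from the rspec-parameterized gem. A self-contained sketch of that syntax with illustrative values:

RSpec.describe 'table syntax sketch' do
  using RSpec::Parameterized::TableSyntax

  where(:a, :b, :sum) do
    1 | 2 | 3
    4 | 5 | 9
  end

  with_them do
    # One example is generated per table row, with a, b and sum bound per row.
    it 'adds the operands' do
      expect(a + b).to eq(sum)
    end
  end
end
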
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
new file mode 100644
index 00000000000..9ce6007165b
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/global_spec.rb
@@ -0,0 +1,245 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Global do
+ let(:global) { described_class.new(hash) }
+ let(:default_image_upload_path_value) { 'source/images' }
+
+ let(:default_mounts_value) do
+ [
+ {
+ source: 'source',
+ target: ''
+ }
+ ]
+ end
+
+ let(:default_static_site_generator_value) { 'middleman' }
+
+ shared_examples_for 'valid default configuration' do
+ describe '#compose!' do
+ before do
+ global.compose!
+ end
+
+ it 'creates nodes hash' do
+ expect(global.descendants).to be_an Array
+ end
+
+ it 'creates node object for each entry' do
+ expect(global.descendants.count).to eq 3
+ end
+
+ it 'creates node object using valid class' do
+ expect(global.descendants.map(&:class)).to match_array(expected_node_object_classes)
+ end
+
+ it 'sets a description containing "Static Site Editor" for all nodes' do
+ expect(global.descendants.map(&:description)).to all(match(/Static Site Editor/))
+ end
+
+ describe '#leaf?' do
+ it 'is not leaf' do
+ expect(global).not_to be_leaf
+ end
+ end
+ end
+
+ context 'when not composed' do
+ describe '#static_site_generator_value' do
+ it 'returns nil' do
+ expect(global.static_site_generator_value).to be nil
+ end
+ end
+
+ describe '#leaf?' do
+ it 'is leaf' do
+ expect(global).to be_leaf
+ end
+ end
+ end
+
+ context 'when composed' do
+ before do
+ global.compose!
+ end
+
+ describe '#errors' do
+ it 'has no errors' do
+ expect(global.errors).to be_empty
+ end
+ end
+
+ describe '#image_upload_path_value' do
+ it 'returns correct values' do
+ expect(global.image_upload_path_value).to eq(default_image_upload_path_value)
+ end
+ end
+
+ describe '#mounts_value' do
+ it 'returns correct values' do
+ expect(global.mounts_value).to eq(default_mounts_value)
+ end
+ end
+
+ describe '#static_site_generator_value' do
+ it 'returns correct values' do
+ expect(global.static_site_generator_value).to eq(default_static_site_generator_value)
+ end
+ end
+ end
+ end
+
+ describe '.nodes' do
+ it 'returns a hash' do
+ expect(described_class.nodes).to be_a(Hash)
+ end
+
+ context 'when filtering all the entry/node names' do
+ it 'contains the expected node names' do
+ expected_node_names = %i[
+ image_upload_path
+ mounts
+ static_site_generator
+ ]
+ expect(described_class.nodes.keys).to match_array(expected_node_names)
+ end
+ end
+ end
+
+ context 'when configuration is valid' do
+ context 'when some entries defined' do
+ let(:expected_node_object_classes) do
+ [
+ Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath,
+ Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts,
+ Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
+ ]
+ end
+
+ let(:hash) do
+ {
+ image_upload_path: default_image_upload_path_value,
+ mounts: default_mounts_value,
+ static_site_generator: default_static_site_generator_value
+ }
+ end
+
+ it_behaves_like 'valid default configuration'
+ end
+ end
+
+ context 'when value is an empty hash' do
+ let(:expected_node_object_classes) do
+ [
+ Gitlab::Config::Entry::Unspecified,
+ Gitlab::Config::Entry::Unspecified,
+ Gitlab::Config::Entry::Unspecified
+ ]
+ end
+
+ let(:hash) { {} }
+
+ it_behaves_like 'valid default configuration'
+ end
+
+ context 'when configuration is not valid' do
+ before do
+ global.compose!
+ end
+
+ context 'when a single entry is invalid' do
+ let(:hash) do
+ { image_upload_path: { not_a_string: true } }
+ end
+
+ describe '#errors' do
+ it 'reports errors' do
+ expect(global.errors)
+ .to include 'image_upload_path config should be a string'
+ end
+ end
+ end
+
+ context 'when multiple entries are invalid' do
+ let(:hash) do
+ {
+ image_upload_path: { not_a_string: true },
+ static_site_generator: { not_a_string: true }
+ }
+ end
+
+ describe '#errors' do
+ it 'reports errors' do
+ expect(global.errors)
+ .to match_array([
+ 'image_upload_path config should be a string',
+ 'static_site_generator config should be a string',
+ "static_site_generator config should be 'middleman'"
+ ])
+ end
+ end
+ end
+
+ context 'when there is an invalid key' do
+ let(:hash) do
+ { invalid_key: true }
+ end
+
+ describe '#errors' do
+ it 'reports errors' do
+ expect(global.errors)
+ .to include 'global config contains unknown keys: invalid_key'
+ end
+ end
+ end
+ end
+
+ context 'when value is not a hash' do
+ let(:hash) { [] }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(global).not_to be_valid
+ end
+ end
+
+ describe '#errors' do
+ it 'returns error about invalid type' do
+ expect(global.errors.first).to match /should be a hash/
+ end
+ end
+ end
+
+ describe '#specified?' do
+ it 'is concrete entry that is defined' do
+ expect(global.specified?).to be true
+ end
+ end
+
+ describe '#[]' do
+ before do
+ global.compose!
+ end
+
+ let(:hash) do
+ { static_site_generator: default_static_site_generator_value }
+ end
+
+ context 'when entry exists' do
+ it 'returns correct entry' do
+ expect(global[:static_site_generator])
+ .to be_an_instance_of Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator
+ expect(global[:static_site_generator].value).to eq default_static_site_generator_value
+ end
+ end
+
+ context 'when entry does not exist' do
+ it 'always returns an unspecified node' do
+ expect(global[:some][:unknown][:node])
+ .not_to be_specified
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
new file mode 100644
index 00000000000..c2b7fbf6f98
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/image_upload_path_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::ImageUploadPath do
+ subject(:image_upload_path_entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'with a valid config' do
+ let(:config) { 'an-image-upload-path' }
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns an image_upload_path key' do
+ expect(image_upload_path_entry.value).to eq config
+ end
+ end
+ end
+
+ context 'with an invalid config' do
+ let(:config) { { not_a_string: true } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports errors about wrong type' do
+ expect(image_upload_path_entry.errors)
+ .to include 'image upload path config should be a string'
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default image_upload_path' do
+ expect(described_class.default).to eq 'source/images'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
new file mode 100644
index 00000000000..04248fc60a5
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mount_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mount do
+ subject(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'with a valid config' do
+ context 'and target is a non-empty string' do
+ let(:config) do
+ {
+ source: 'source',
+ target: 'sub-site'
+ }
+ end
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns mount configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+
+ context 'and target is an empty string' do
+ let(:config) do
+ {
+ source: 'source',
+ target: ''
+ }
+ end
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns mount configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+ end
+
+ context 'with an invalid config' do
+ context 'when source is not a string' do
+ let(:config) { { source: 123, target: 'target' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'mount source should be a string'
+ end
+ end
+
+ context 'when source is not present' do
+ let(:config) { { target: 'target' } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include "mount source can't be blank"
+ end
+ end
+
+ context 'when target is not a string' do
+ let(:config) { { source: 'source', target: 123 } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'mount target should be a string'
+ end
+ end
+
+ context 'when there is an unknown key present' do
+ let(:config) { { test: 100 } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports error' do
+ expect(entry.errors)
+ .to include 'mount config contains unknown keys: test'
+ end
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default mount' do
+ expect(described_class.default)
+ .to eq({
+ source: 'source',
+ target: ''
+ })
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
new file mode 100644
index 00000000000..0ae2ece9474
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/mounts_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::Mounts do
+ subject(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'with a valid config' do
+ let(:config) do
+ [
+ {
+ source: 'source',
+ target: ''
+ },
+ {
+ source: 'sub-site/source',
+ target: 'sub-site'
+ }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+
+ describe '#value' do
+ it 'returns mounts configuration' do
+ expect(entry.value).to eq config
+ end
+ end
+ end
+
+ context 'with an invalid config' do
+ let(:config) { { not_an_array: true } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'reports errors about wrong type' do
+ expect(entry.errors)
+ .to include 'mounts config should be a array'
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default mounts' do
+ expect(described_class.default)
+ .to eq([{
+ source: 'source',
+ target: ''
+ }])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
new file mode 100644
index 00000000000..a9c730218cf
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config/file_config/entry/static_site_generator_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig::Entry::StaticSiteGenerator do
+ let(:static_site_generator) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'when value is valid' do
+ let(:config) { 'middleman' }
+
+ describe '#value' do
+ it 'returns a static_site_generator key' do
+ expect(static_site_generator.value).to eq config
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(static_site_generator).to be_valid
+ end
+ end
+ end
+
+ context 'when value is invalid' do
+ let(:config) { 'not-a-valid-generator' }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(static_site_generator).not_to be_valid
+ end
+ end
+ end
+
+ context 'when value has a wrong type' do
+ let(:config) { { not_a_string: true } }
+
+ it 'reports errors about wrong type' do
+ expect(static_site_generator.errors)
+ .to include 'static site generator config should be a string'
+ end
+ end
+ end
+
+ describe '.default' do
+ it 'returns default static_site_generator' do
+ expect(described_class.default).to eq 'middleman'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
index 594425c2dab..d444d4f1df7 100644
--- a/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/file_config_spec.rb
@@ -3,13 +3,85 @@
require 'spec_helper'
RSpec.describe Gitlab::StaticSiteEditor::Config::FileConfig do
- subject(:config) { described_class.new }
+ let(:config) do
+ described_class.new(yml)
+ end
+
+ context 'when config is valid' do
+ context 'when config has valid values' do
+ let(:yml) do
+ <<-EOS
+ static_site_generator: middleman
+ EOS
+ end
+
+ describe '#to_hash_with_defaults' do
+ it 'returns a hash created from the string' do
+ expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'has no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+ end
+ end
+
+ context 'when a config entry has an empty value' do
+ let(:yml) { 'static_site_generator: ' }
+
+ describe '#to_hash_with_defaults' do
+ it 'returns default value' do
+ expect(config.to_hash_with_defaults.fetch(:static_site_generator)).to eq 'middleman'
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(config).to be_valid
+ end
+
+ it 'has no errors' do
+ expect(config.errors).to be_empty
+ end
+ end
+ end
+
+ context 'when config is invalid' do
+ context 'when yml is incorrect' do
+ let(:yml) { '// invalid' }
+
+ describe '.new' do
+ it 'raises error' do
+ expect { config }.to raise_error(described_class::ConfigError, /Invalid configuration format/)
+ end
+ end
+ end
+
+ context 'when a config value exists but is not valid' do
+ let(:yml) { 'static_site_generator: "unsupported-generator"' }
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(config).not_to be_valid
+ end
- describe '#data' do
- subject { config.data }
+ it 'has errors' do
+ expect(config.errors).not_to be_empty
+ end
+ end
- it 'returns hardcoded data for now' do
- is_expected.to match(static_site_generator: 'middleman')
+ describe '#errors' do
+ it 'returns an array of strings' do
+ expect(config.errors).to all(be_an_instance_of(String))
+ end
+ end
end
end
end
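For orientation, a minimal usage sketch of the config class exercised above, assuming only the calls the spec itself makes (FileConfig.new with a YAML string, #valid?, #errors, #to_hash_with_defaults); the key names and default values are taken from the entry specs earlier in this diff, not from any other source:

  yml = <<~EOS
    static_site_generator: middleman
    image_upload_path: source/images
    mounts:
      - source: source
        target: ''
  EOS

  # Parse and validate the Static Site Editor file config.
  config = Gitlab::StaticSiteEditor::Config::FileConfig.new(yml)
  config.valid?                                               # => true
  config.errors                                               # => []
  config.to_hash_with_defaults.fetch(:static_site_generator)  # => 'middleman'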
diff --git a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
index 3433a54be9c..2f761b69e60 100644
--- a/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config/generated_config_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
project: 'project',
project_id: project.id,
return_url: 'http://example.com',
- is_supported_content: 'true',
+ is_supported_content: true,
base_url: '/namespace/project/-/sse/master%2FREADME.md',
merge_requests_illustration_path: %r{illustrations/merge_requests}
})
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
stub_feature_flags(sse_erb_support: project)
end
- it { is_expected.to include(is_supported_content: 'true') }
+ it { is_expected.to include(is_supported_content: true) }
end
context 'when feature flag is disabled' do
@@ -75,7 +75,7 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
stub_feature_flags(sse_erb_support: false)
end
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
end
@@ -88,31 +88,31 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
context 'when branch is not master' do
let(:ref) { 'my-branch' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when file does not have a markdown extension' do
let(:path) { 'README.txt' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when file does not have an extension' do
let(:path) { 'README' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when file does not exist' do
let(:path) { 'UNKNOWN.md' }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when repository is empty' do
let(:repository) { create(:project_empty_repo).repository }
- it { is_expected.to include(is_supported_content: 'false') }
+ it { is_expected.to include(is_supported_content: false) }
end
context 'when return_url is not a valid URL' do
@@ -132,5 +132,11 @@ RSpec.describe Gitlab::StaticSiteEditor::Config::GeneratedConfig do
it { is_expected.to include(return_url: nil) }
end
+
+ context 'when a commit for the ref cannot be found' do
+ let(:ref) { 'nonexistent-ref' }
+
+ it { is_expected.to include(commit_id: nil) }
+ end
end
end
diff --git a/spec/lib/gitlab/themes_spec.rb b/spec/lib/gitlab/themes_spec.rb
index 68ff28becfa..6d03cf496b8 100644
--- a/spec/lib/gitlab/themes_spec.rb
+++ b/spec/lib/gitlab/themes_spec.rb
@@ -47,4 +47,18 @@ RSpec.describe Gitlab::Themes, lib: true do
expect(ids).not_to be_empty
end
end
+
+ describe 'theme.css_filename' do
+ described_class.each do |theme|
+ next unless theme.css_filename
+
+ context "for #{theme.name}" do
+ it 'returns an existing CSS filename' do
+ css_file_path = Rails.root.join('app/assets/stylesheets/themes', theme.css_filename + '.scss')
+
+ expect(File.exist?(css_file_path)).to eq(true)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index f0bf7b9964f..6ddeaf98370 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Tracking do
end
around do |example|
- Timecop.freeze(timestamp) { example.run }
+ travel_to(timestamp) { example.run }
end
before do
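The Timecop.freeze to travel_to replacements in this and the following specs switch to ActiveSupport's built-in time helpers. A minimal sketch of the pattern, assuming the helpers are available in the example group (GitLab's spec_helper generally includes ActiveSupport::Testing::TimeHelpers):

  # include ActiveSupport::Testing::TimeHelpers # only needed if not already included via spec_helper
  around do |example|
    # travel_to pins Time.now / Time.current to the given instant for the block,
    # then restores real time afterwards, making it a drop-in replacement for Timecop.freeze here.
    travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
  end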
diff --git a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
index 2a674557b76..f2c1d8718d7 100644
--- a/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/editor_unique_counter_spec.rb
@@ -41,11 +41,11 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
context 'for web IDE edit actions' do
it_behaves_like 'tracks and counts action' do
def track_action(params)
- described_class.track_web_ide_edit_action(params)
+ described_class.track_web_ide_edit_action(**params)
end
def count_unique(params)
- described_class.count_web_ide_edit_actions(params)
+ described_class.count_web_ide_edit_actions(**params)
end
end
end
@@ -53,11 +53,11 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
context 'for SFE edit actions' do
it_behaves_like 'tracks and counts action' do
def track_action(params)
- described_class.track_sfe_edit_action(params)
+ described_class.track_sfe_edit_action(**params)
end
def count_unique(params)
- described_class.count_sfe_edit_actions(params)
+ described_class.count_sfe_edit_actions(**params)
end
end
end
@@ -65,11 +65,11 @@ RSpec.describe Gitlab::UsageDataCounters::EditorUniqueCounter, :clean_gitlab_red
context 'for snippet editor edit actions' do
it_behaves_like 'tracks and counts action' do
def track_action(params)
- described_class.track_snippet_editor_edit_action(params)
+ described_class.track_snippet_editor_edit_action(**params)
end
def count_unique(params)
- described_class.count_snippet_editor_edit_actions(params)
+ described_class.count_snippet_editor_edit_actions(**params)
end
end
end
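The (params) to (**params) changes in this and the following counter specs account for Ruby 2.7+ keyword-argument separation, where a hash passed positionally no longer fills keyword parameters. A minimal illustration with hypothetical method and variable names (nothing here is taken from the codebase):

  # Hypothetical method taking keyword arguments, as the tracked counter methods do.
  def track_action(event:, author:, time:)
    [event, author, time]
  end

  params = { event: :edit, author: 'user', time: Time.utc(2020, 6, 1) }

  track_action(params)   # deprecation warning on Ruby 2.7, ArgumentError on Ruby 3.0
  track_action(**params) # explicit double splat passes the hash as keywords on both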
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index f881da71251..3255e3616b2 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
# depending on which day of the week test is run.
# Monday 6th of June
reference_time = Time.utc(2020, 6, 1)
- Timecop.freeze(reference_time) { example.run }
+ travel_to(reference_time) { example.run }
end
describe '.categories' do
@@ -238,16 +238,20 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'returns the number of unique events for all known events' do
results = {
- 'category1' => {
- 'event1_slot' => 1,
- 'event2_slot' => 1,
- 'category1_total_unique_counts_weekly' => 2,
- 'category1_total_unique_counts_monthly' => 3
- },
- 'category2' => {
- 'event3' => 1,
- 'event4' => 1
- }
+ "category1" => {
+ "event1_slot_weekly" => 1,
+ "event1_slot_monthly" => 1,
+ "event2_slot_weekly" => 1,
+ "event2_slot_monthly" => 2,
+ "category1_total_unique_counts_weekly" => 2,
+ "category1_total_unique_counts_monthly" => 3
+ },
+ "category2" => {
+ "event3_weekly" => 1,
+ "event3_monthly" => 1,
+ "event4_weekly" => 1,
+ "event4_monthly" => 1
+ }
}
expect(subject.unique_events_data).to eq(results)
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index 479fe36bcdd..4cc85c86de1 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_TITLE_CHANGED }
def track_action(params)
- described_class.track_issue_title_changed_action(params)
+ described_class.track_issue_title_changed_action(**params)
end
end
end
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_DESCRIPTION_CHANGED }
def track_action(params)
- described_class.track_issue_description_changed_action(params)
+ described_class.track_issue_description_changed_action(**params)
end
end
end
@@ -67,7 +67,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_ASSIGNEE_CHANGED }
def track_action(params)
- described_class.track_issue_assignee_changed_action(params)
+ described_class.track_issue_assignee_changed_action(**params)
end
end
end
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_MADE_CONFIDENTIAL }
def track_action(params)
- described_class.track_issue_made_confidential_action(params)
+ described_class.track_issue_made_confidential_action(**params)
end
end
end
@@ -87,7 +87,47 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let(:action) { described_class::ISSUE_MADE_VISIBLE }
def track_action(params)
- described_class.track_issue_made_visible_action(params)
+ described_class.track_issue_made_visible_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue created actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_CREATED }
+
+ def track_action(params)
+ described_class.track_issue_created_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue closed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_CLOSED }
+
+ def track_action(params)
+ described_class.track_issue_closed_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue reopened actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_REOPENED }
+
+ def track_action(params)
+ described_class.track_issue_reopened_action(**params)
+ end
+ end
+ end
+
+ context 'for Issue label changed actions' do
+ it_behaves_like 'tracks and counts action' do
+ let(:action) { described_class::ISSUE_LABEL_CHANGED }
+
+ def track_action(params)
+ described_class.track_issue_label_changed_action(**params)
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb b/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
index 8f5f1347ce8..d1144dd0bc5 100644
--- a/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/track_unique_events_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueEvents, :clean_gitlab_redis
let(:time) { Time.zone.now }
def track_event(params)
- track_unique_events.track_event(params)
+ track_unique_events.track_event(**params)
end
def count_unique(params)
- track_unique_events.count_unique_events(params)
+ track_unique_events.count_unique_events(**params)
end
context 'tracking an event' do
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 6631a0d3cc6..984fd4c08e6 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -29,7 +29,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
user_minimum_id user_maximum_id unique_visit_service
deployment_minimum_id deployment_maximum_id
approval_merge_request_rule_minimum_id
- approval_merge_request_rule_maximum_id)
+ approval_merge_request_rule_maximum_id
+ auth_providers)
values.each do |key|
expect(described_class).to receive(:clear_memoization).with(key)
end
@@ -167,6 +168,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe 'usage_activity_by_stage_manage' do
it 'includes accurate usage_activity_by_stage data' do
+ described_class.clear_memoization(:auth_providers)
+
stub_config(
omniauth:
{ providers: omniauth_providers }
@@ -174,21 +177,29 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
for_defined_days_back do
user = create(:user)
+ user2 = create(:user)
create(:event, author: user)
create(:group_member, user: user)
+ create(:authentication_event, user: user, provider: :ldapmain, result: :success)
+ create(:authentication_event, user: user2, provider: :ldapsecondary, result: :success)
+ create(:authentication_event, user: user2, provider: :group_saml, result: :success)
+ create(:authentication_event, user: user2, provider: :group_saml, result: :success)
+ create(:authentication_event, user: user, provider: :group_saml, result: :failed)
end
expect(described_class.usage_activity_by_stage_manage({})).to include(
events: 2,
groups: 2,
- users_created: 4,
- omniauth_providers: ['google_oauth2']
+ users_created: 6,
+ omniauth_providers: ['google_oauth2'],
+ user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4 }
)
expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
events: 1,
groups: 1,
- users_created: 2,
- omniauth_providers: ['google_oauth2']
+ users_created: 3,
+ omniauth_providers: ['google_oauth2'],
+ user_auth_by_provider: { 'group_saml' => 1, 'ldap' => 2 }
)
end
@@ -244,6 +255,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
)
end
+ it 'includes group imports usage data' do
+ for_defined_days_back do
+ user = create(:user)
+ group = create(:group)
+ group.add_owner(user)
+ create(:group_import_state, group: group, user: user)
+ end
+
+ expect(described_class.usage_activity_by_stage_manage({}))
+ .to include(groups_imported: 2)
+ expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period))
+ .to include(groups_imported: 1)
+ end
+
def omniauth_providers
[
OpenStruct.new(name: 'google_oauth2'),
@@ -260,17 +285,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
cluster = create(:cluster, user: user)
create(:project, creator: user)
create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ create(:project_tracing_setting)
end
expect(described_class.usage_activity_by_stage_monitor({})).to include(
clusters: 2,
clusters_applications_prometheus: 2,
- operations_dashboard_default_dashboard: 2
+ operations_dashboard_default_dashboard: 2,
+ projects_with_tracing_enabled: 2
)
expect(described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)).to include(
clusters: 1,
clusters_applications_prometheus: 1,
- operations_dashboard_default_dashboard: 1
+ operations_dashboard_default_dashboard: 1,
+ projects_with_tracing_enabled: 1
)
end
end
@@ -420,6 +448,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_inheriting_instance_mattermost_active]).to eq(1)
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
+ expect(count_data[:projects_with_tracing_enabled]).to eq(1)
expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
@@ -472,8 +501,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:personal_snippets]).to eq(2)
expect(count_data[:project_snippets]).to eq(4)
+ expect(count_data[:projects_creating_incidents]).to eq(2)
expect(count_data[:projects_with_packages]).to eq(2)
expect(count_data[:packages]).to eq(4)
+ expect(count_data[:user_preferences_user_gitpod_enabled]).to eq(1)
end
it 'gathers object store usage correctly' do
@@ -628,6 +659,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:gitlab_shared_runners_enabled]).to eq(Gitlab.config.gitlab_ci.shared_runners_enabled)
expect(subject[:web_ide_clientside_preview_enabled]).to eq(Gitlab::CurrentSettings.web_ide_clientside_preview_enabled?)
expect(subject[:grafana_link_enabled]).to eq(Gitlab::CurrentSettings.grafana_enabled?)
+ expect(subject[:gitpod_enabled]).to eq(Gitlab::CurrentSettings.gitpod_enabled?)
end
context 'with embedded Prometheus' do
@@ -657,6 +689,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:grafana_link_enabled]).to eq(false)
end
end
+
+ context 'with Gitpod' do
+ it 'returns true when it is enabled' do
+ stub_application_setting(gitpod_enabled: true)
+
+ expect(subject[:gitpod_enabled]).to eq(true)
+ end
+
+ it 'returns false when it is disabled' do
+ stub_application_setting(gitpod_enabled: false)
+
+ expect(subject[:gitpod_enabled]).to eq(false)
+ end
+ end
end
describe '.components_usage_data' do
@@ -979,7 +1025,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- def for_defined_days_back(days: [29, 2])
+ def for_defined_days_back(days: [31, 3])
days.each do |n|
Timecop.travel(n.days.ago) do
yield
@@ -1146,11 +1192,13 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
categories.each do |category|
keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
+ metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
+
if ineligible_total_categories.exclude?(category)
- keys.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
+ metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
end
- expect(subject[:redis_hll_counters][category].keys).to match_array(keys)
+ expect(subject[:redis_hll_counters][category].keys).to match_array(metrics)
end
end
end
@@ -1169,6 +1217,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe '.snowplow_event_counts' do
+ let_it_be(:time_period) { { collector_tstamp: 8.days.ago..1.day.ago } }
+
context 'when self-monitoring project exists' do
let_it_be(:project) { create(:project) }
@@ -1181,14 +1231,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
stub_feature_flags(product_analytics: project)
create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote')
- create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 28.days.ago)
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 2.days.ago)
+ create(:product_analytics_event, project: project, se_category: 'epics', se_action: 'promote', collector_tstamp: 9.days.ago)
+
+ create(:product_analytics_event, project: project, se_category: 'foo', se_action: 'bar', collector_tstamp: 2.days.ago)
end
it 'returns promoted_issues for the time period' do
- expect(described_class.snowplow_event_counts[:promoted_issues]).to eq(2)
- expect(described_class.snowplow_event_counts(
- time_period: described_class.last_28_days_time_period(column: :collector_tstamp)
- )[:promoted_issues]).to eq(1)
+ expect(described_class.snowplow_event_counts(time_period)[:promoted_issues]).to eq(1)
end
end
@@ -1198,14 +1248,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
it 'returns an empty hash' do
- expect(described_class.snowplow_event_counts).to eq({})
+ expect(described_class.snowplow_event_counts(time_period)).to eq({})
end
end
end
context 'when self-monitoring project does not exist' do
it 'returns an empty hash' do
- expect(described_class.snowplow_event_counts).to eq({})
+ expect(described_class.snowplow_event_counts(time_period)).to eq({})
end
end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 362cbaa78e9..9c0dc69ccd1 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -212,33 +212,26 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#track_usage_event' do
let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
- let(:event_name) { 'my_event' }
+ let(:event_name) { 'incident_management_alert_status_changed' }
let(:unknown_event) { 'unknown' }
let(:feature) { "usage_data_#{event_name}" }
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
context 'with feature enabled' do
before do
stub_feature_flags(feature => true)
end
it 'tracks redis hll event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(value, event_name)
described_class.track_usage_event(event_name, value)
end
- it 'does not track event when usage ping is not enabled' do
- stub_application_setting(usage_ping_enabled: false)
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
-
- described_class.track_usage_event(event_name, value)
- end
-
it 'raise an error for unknown event' do
- stub_application_setting(usage_ping_enabled: true)
-
expect { described_class.track_usage_event(unknown_event, value) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
diff --git a/spec/lib/gitlab/webpack/manifest_spec.rb b/spec/lib/gitlab/webpack/manifest_spec.rb
new file mode 100644
index 00000000000..e3aeceda148
--- /dev/null
+++ b/spec/lib/gitlab/webpack/manifest_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'json'
+
+RSpec.describe Gitlab::Webpack::Manifest do
+ let(:manifest) do
+ <<-EOF
+ {
+ "errors": [],
+ "assetsByChunkName": {
+ "entry1": [ "entry1.js", "entry1-a.js" ],
+ "entry2": "entry2.js"
+ }
+ }
+ EOF
+ end
+
+ around do |example|
+ Gitlab::Webpack::Manifest.clear_manifest!
+
+ example.run
+
+ Gitlab::Webpack::Manifest.clear_manifest!
+ end
+
+ shared_examples_for "a valid manifest" do
+ it "returns single entry asset paths from the manifest" do
+ expect(Gitlab::Webpack::Manifest.asset_paths("entry2")).to eq(["/public_path/entry2.js"])
+ end
+
+ it "returns multiple entry asset paths from the manifest" do
+ expect(Gitlab::Webpack::Manifest.asset_paths("entry1")).to eq(["/public_path/entry1.js", "/public_path/entry1-a.js"])
+ end
+
+ it "errors on a missing entry point" do
+ expect { Gitlab::Webpack::Manifest.asset_paths("herp") }.to raise_error(Gitlab::Webpack::Manifest::AssetMissingError)
+ end
+ end
+
+ before do
+ # Test that config variables work while we're here
+ ::Rails.configuration.webpack.dev_server.host = 'hostname'
+ ::Rails.configuration.webpack.dev_server.port = 1999
+ ::Rails.configuration.webpack.dev_server.manifest_host = 'hostname'
+ ::Rails.configuration.webpack.dev_server.manifest_port = 2000
+ ::Rails.configuration.webpack.manifest_filename = "my_manifest.json"
+ ::Rails.configuration.webpack.public_path = "public_path"
+ ::Rails.configuration.webpack.output_dir = "manifest_output"
+ end
+
+ context "with dev server enabled" do
+ before do
+ ::Rails.configuration.webpack.dev_server.enabled = true
+
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: manifest, status: 200)
+ end
+
+ describe ".asset_paths" do
+ it_behaves_like "a valid manifest"
+
+ it "errors if we can't find the manifest" do
+ ::Rails.configuration.webpack.manifest_filename = "broken.json"
+ stub_request(:get, "http://hostname:2000/public_path/broken.json").to_raise(SocketError)
+
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::ManifestLoadError)
+ end
+
+ describe "webpack errors" do
+ context "when webpack has 'Module build failed' errors in its manifest" do
+ it "errors" do
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" => [
+ "somethingModule build failed something",
+ "I am an error"
+ ]).to_json
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
+
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::WebpackError)
+ end
+ end
+
+ context "when webpack does not have 'Module build failed' errors in its manifest" do
+ it "does not error" do
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" => ["something went wrong"]).to_json
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
+
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.not_to raise_error
+ end
+ end
+
+ it "does not error if errors is present but empty" do
+ error_manifest = Gitlab::Json.parse(manifest).merge("errors" => []).to_json
+ stub_request(:get, "http://hostname:2000/public_path/my_manifest.json").to_return(body: error_manifest, status: 200)
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.not_to raise_error
+ end
+ end
+ end
+ end
+
+ context "with dev server disabled" do
+ before do
+ ::Rails.configuration.webpack.dev_server.enabled = false
+ allow(File).to receive(:read).with(::Rails.root.join("manifest_output/my_manifest.json")).and_return(manifest)
+ end
+
+ describe ".asset_paths" do
+ it_behaves_like "a valid manifest"
+
+ it "errors if we can't find the manifest" do
+ ::Rails.configuration.webpack.manifest_filename = "broken.json"
+ allow(File).to receive(:read).with(::Rails.root.join("manifest_output/broken.json")).and_raise(Errno::ENOENT)
+ expect { Gitlab::Webpack::Manifest.asset_paths("entry1") }.to raise_error(Gitlab::Webpack::Manifest::ManifestLoadError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab_danger_spec.rb b/spec/lib/gitlab_danger_spec.rb
index b534823a888..e332647cf8a 100644
--- a/spec/lib/gitlab_danger_spec.rb
+++ b/spec/lib/gitlab_danger_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabDanger do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages, telemetry, utility_css, pajamas')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, documentation, frozen_string, duplicate_yarn_dependencies, prettier, eslint, karma, database, commit_messages, product_analytics, utility_css, pajamas')
end
end
diff --git a/spec/lib/google_api/auth_spec.rb b/spec/lib/google_api/auth_spec.rb
index eeb99bfbb6c..92cb9e494ac 100644
--- a/spec/lib/google_api/auth_spec.rb
+++ b/spec/lib/google_api/auth_spec.rb
@@ -12,12 +12,12 @@ RSpec.describe GoogleApi::Auth do
end
describe '#authorize_url' do
- subject { client.authorize_url }
+ subject { Addressable::URI.parse(client.authorize_url) }
it 'returns authorize_url' do
- is_expected.to start_with('https://accounts.google.com/o/oauth2')
- is_expected.to include(URI.encode(redirect_uri, URI::PATTERN::RESERVED))
- is_expected.to include(URI.encode(redirect_to, URI::PATTERN::RESERVED))
+ expect(subject.to_s).to start_with('https://accounts.google.com/o/oauth2')
+ expect(subject.query_values['state']).to eq(redirect_to)
+ expect(subject.query_values['redirect_uri']).to eq(redirect_uri)
end
end
diff --git a/spec/lib/grafana/time_window_spec.rb b/spec/lib/grafana/time_window_spec.rb
index 9ee65c6cf20..0657bed7b28 100644
--- a/spec/lib/grafana/time_window_spec.rb
+++ b/spec/lib/grafana/time_window_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Grafana::TimeWindow do
let(:to) { '1552828200000' }
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
describe '#formatted' do
@@ -37,7 +37,7 @@ RSpec.describe Grafana::RangeWithDefaults do
let(:to) { Grafana::Timestamp.from_ms_since_epoch('1552828200000') }
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
describe '#to_hash' do
@@ -82,7 +82,7 @@ RSpec.describe Grafana::Timestamp do
let(:timestamp) { Time.at(1552799400) }
around do |example|
- Timecop.freeze(Time.utc(2019, 3, 17, 13, 10)) { example.run }
+ travel_to(Time.utc(2019, 3, 17, 13, 10)) { example.run }
end
describe '#formatted' do
diff --git a/spec/lib/pager_duty/webhook_payload_parser_spec.rb b/spec/lib/pager_duty/webhook_payload_parser_spec.rb
index 0010165318d..54c61b9121c 100644
--- a/spec/lib/pager_duty/webhook_payload_parser_spec.rb
+++ b/spec/lib/pager_duty/webhook_payload_parser_spec.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'json_schemer'
RSpec.describe PagerDuty::WebhookPayloadParser do
describe '.call' do
@@ -8,36 +9,36 @@ RSpec.describe PagerDuty::WebhookPayloadParser do
File.read(File.join(File.dirname(__FILE__), '../../fixtures/pager_duty/webhook_incident_trigger.json'))
end
+ let(:triggered_event) do
+ {
+ 'event' => 'incident.trigger',
+ 'incident' => {
+ 'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
+ 'incident_number' => 33,
+ 'title' => 'My new incident',
+ 'status' => 'triggered',
+ 'created_at' => '2017-09-26T15:14:36Z',
+ 'urgency' => 'high',
+ 'incident_key' => nil,
+ 'assignees' => [{
+ 'summary' => 'Laura Haley',
+ 'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
+ }],
+ 'impacted_services' => [{
+ 'summary' => 'Production XDB Cluster',
+ 'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
+ }]
+ }
+ }
+ end
+
subject(:parse) { described_class.call(payload) }
context 'when payload is a correct PagerDuty payload' do
let(:payload) { Gitlab::Json.parse(fixture_file) }
it 'returns parsed payload' do
- is_expected.to eq(
- [
- {
- 'event' => 'incident.trigger',
- 'incident' => {
- 'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
- 'incident_number' => 33,
- 'title' => 'My new incident',
- 'status' => 'triggered',
- 'created_at' => '2017-09-26T15:14:36Z',
- 'urgency' => 'high',
- 'incident_key' => nil,
- 'assignees' => [{
- 'summary' => 'Laura Haley',
- 'url' => 'https://webdemo.pagerduty.com/users/P553OPV'
- }],
- 'impacted_services' => [{
- 'summary' => 'Production XDB Cluster',
- 'url' => 'https://webdemo.pagerduty.com/services/PN49J75'
- }]
- }
- }
- ]
- )
+ is_expected.to eq([triggered_event])
end
context 'when assignments summary and html_url are blank' do
@@ -69,11 +70,42 @@ RSpec.describe PagerDuty::WebhookPayloadParser do
end
end
- context 'when payload has no incidents' do
+ context 'when payload schema is invalid' do
let(:payload) { { 'messages' => [{ 'event' => 'incident.trigger' }] } }
it 'returns payload with blank incidents' do
- is_expected.to eq([{ 'event' => 'incident.trigger', 'incident' => {} }])
+ is_expected.to eq([])
+ end
+ end
+
+ context 'when payload consists of two messages' do
+ context 'when one of the messages has no incident data' do
+ let(:payload) do
+ valid_payload = Gitlab::Json.parse(fixture_file)
+ event = { 'event' => 'incident.trigger' }
+ valid_payload['messages'] = valid_payload['messages'].append(event)
+ valid_payload
+ end
+
+ it 'returns parsed payload with valid events only' do
+ is_expected.to eq([triggered_event])
+ end
+ end
+
+ context 'when one of the messages has an unknown event' do
+ let(:payload) do
+ valid_payload = Gitlab::Json.parse(fixture_file)
+ event = { 'event' => 'incident.unknown', 'incident' => valid_payload['messages'].first['incident'] }
+ valid_payload['messages'] = valid_payload['messages'].append(event)
+ valid_payload
+ end
+
+ it 'returns parsed payload' do
+ unknown_event = triggered_event.dup
+ unknown_event['event'] = 'incident.unknown'
+
+ is_expected.to contain_exactly(triggered_event, unknown_event)
+ end
end
end
end
diff --git a/spec/lib/safe_zip/extract_spec.rb b/spec/lib/safe_zip/extract_spec.rb
index 30b7e1cdd2c..443430b267d 100644
--- a/spec/lib/safe_zip/extract_spec.rb
+++ b/spec/lib/safe_zip/extract_spec.rb
@@ -15,11 +15,7 @@ RSpec.describe SafeZip::Extract do
describe '#extract' do
subject { object.extract(directories: directories, to: target_path) }
- shared_examples 'extracts archive' do |param|
- before do
- stub_feature_flags(safezip_use_rubyzip: param)
- end
-
+ shared_examples 'extracts archive' do
it 'does extract archive' do
subject
@@ -28,11 +24,7 @@ RSpec.describe SafeZip::Extract do
end
end
- shared_examples 'fails to extract archive' do |param|
- before do
- stub_feature_flags(safezip_use_rubyzip: param)
- end
-
+ shared_examples 'fails to extract archive' do
it 'does not extract archive' do
expect { subject }.to raise_error(SafeZip::Extract::Error)
end
@@ -42,13 +34,7 @@ RSpec.describe SafeZip::Extract do
context "when using #{name} archive" do
let(:archive_name) { name }
- context 'for RubyZip' do
- it_behaves_like 'extracts archive', true
- end
-
- context 'for UnZip' do
- it_behaves_like 'extracts archive', false
- end
+ it_behaves_like 'extracts archive'
end
end
@@ -56,13 +42,7 @@ RSpec.describe SafeZip::Extract do
context "when using #{name} archive" do
let(:archive_name) { name }
- context 'for RubyZip' do
- it_behaves_like 'fails to extract archive', true
- end
-
- context 'for UnZip (UNSAFE)' do
- it_behaves_like 'extracts archive', false
- end
+ it_behaves_like 'fails to extract archive'
end
end
@@ -70,13 +50,7 @@ RSpec.describe SafeZip::Extract do
let(:archive_name) { 'valid-simple.zip' }
let(:directories) { %w(non/existing) }
- context 'for RubyZip' do
- it_behaves_like 'fails to extract archive', true
- end
-
- context 'for UnZip' do
- it_behaves_like 'fails to extract archive', false
- end
+ it_behaves_like 'fails to extract archive'
end
end
end
diff --git a/spec/mailers/abuse_report_mailer_spec.rb b/spec/mailers/abuse_report_mailer_spec.rb
index 4eb616722ac..061f972fd35 100644
--- a/spec/mailers/abuse_report_mailer_spec.rb
+++ b/spec/mailers/abuse_report_mailer_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe AbuseReportMailer do
describe '.notify' do
before do
- stub_application_setting(admin_notification_email: 'admin@example.com')
+ stub_application_setting(abuse_notification_email: 'admin@example.com')
end
let(:report) { create(:abuse_report) }
@@ -17,8 +17,8 @@ RSpec.describe AbuseReportMailer do
it_behaves_like 'appearance header and footer enabled'
it_behaves_like 'appearance header and footer not enabled'
- context 'with admin_notification_email set' do
- it 'sends to the admin_notification_email' do
+ context 'with abuse_notification_email set' do
+ it 'sends to the abuse_notification_email' do
is_expected.to deliver_to 'admin@example.com'
end
@@ -27,9 +27,9 @@ RSpec.describe AbuseReportMailer do
end
end
- context 'with no admin_notification_email set' do
+ context 'with no abuse_notification_email set' do
it 'returns early' do
- stub_application_setting(admin_notification_email: nil)
+ stub_application_setting(abuse_notification_email: nil)
expect { described_class.notify(spy).deliver_now }
.not_to change { ActionMailer::Base.deliveries.count }
diff --git a/spec/mailers/emails/projects_spec.rb b/spec/mailers/emails/projects_spec.rb
index 599f62a8113..aa5947bf68e 100644
--- a/spec/mailers/emails/projects_spec.rb
+++ b/spec/mailers/emails/projects_spec.rb
@@ -30,107 +30,118 @@ RSpec.describe Emails::Projects do
let_it_be(:user) { create(:user) }
describe '#prometheus_alert_fired_email' do
+ let(:default_title) { Gitlab::AlertManagement::Payload::Generic::DEFAULT_TITLE }
+ let(:payload) { { 'startsAt' => Time.now.rfc3339 } }
+ let(:alert_attributes) { build(:alert_management_alert, :from_payload, payload: payload, project: project).attributes }
+
subject do
- Notify.prometheus_alert_fired_email(project.id, user.id, alert_params)
+ Notify.prometheus_alert_fired_email(project.id, user.id, alert_attributes)
end
- let(:alert_params) do
- { 'startsAt' => Time.now.rfc3339 }
+ context 'missing required attributes' do
+ let(:alert_attributes) { build(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project).attributes }
+
+ it_behaves_like 'no email'
end
- context 'with a gitlab alert' do
- before do
- alert_params['labels'] = { 'gitlab_alert_id' => alert.prometheus_metric_id.to_s }
- end
+ context 'with minimum required attributes' do
+ let(:payload) { {} }
- let(:title) do
- "#{alert.title} #{alert.computed_operator} #{alert.threshold}"
- end
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
- let(:metrics_url) do
- metrics_project_environment_url(project, environment)
+ it 'has expected subject' do
+ is_expected.to have_subject("#{project.name} | Alert: #{default_title}")
end
- let(:environment) { alert.environment }
+ it 'has expected content' do
+ is_expected.to have_body_text('An alert has been triggered')
+ is_expected.to have_body_text(project.full_path)
+ is_expected.not_to have_body_text('Description:')
+ is_expected.not_to have_body_text('Environment:')
+ is_expected.not_to have_body_text('Metric:')
+ end
+ end
- let!(:alert) { create(:prometheus_alert, project: project) }
+ context 'with description' do
+ let(:payload) { { 'description' => 'alert description' } }
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
it 'has expected subject' do
- is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{title} for 5 minutes")
+ is_expected.to have_subject("#{project.name} | Alert: #{default_title}")
end
it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path)
- is_expected.to have_body_text('Environment:')
- is_expected.to have_body_text(environment.name)
- is_expected.to have_body_text('Metric:')
- is_expected.to have_body_text(alert.full_query)
- is_expected.to have_body_text(metrics_url)
+ is_expected.to have_body_text('Description:')
+ is_expected.to have_body_text('alert description')
+ is_expected.not_to have_body_text('Environment:')
+ is_expected.not_to have_body_text('Metric:')
end
-
- it_behaves_like 'shows the incident issues url'
end
- context 'with no payload' do
- let(:alert_params) { {} }
+ context 'with environment' do
+ let_it_be(:environment) { create(:environment, project: project) }
+ let(:payload) { { 'gitlab_environment_name' => environment.name } }
+ let(:metrics_url) { metrics_project_environment_url(project, environment) }
- it_behaves_like 'no email'
- end
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
- context 'with an unknown alert' do
- before do
- alert_params['labels'] = { 'gitlab_alert_id' => 'unknown' }
+ it 'has expected subject' do
+ is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{default_title}")
end
- it_behaves_like 'no email'
+ it 'has expected content' do
+ is_expected.to have_body_text('An alert has been triggered')
+ is_expected.to have_body_text(project.full_path)
+ is_expected.to have_body_text('Environment:')
+ is_expected.to have_body_text(environment.name)
+ is_expected.not_to have_body_text('Description:')
+ is_expected.not_to have_body_text('Metric:')
+ end
end
- context 'with an external alert' do
- let(:title) { 'alert title' }
+ context 'with gitlab alerting rule' do
+ let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
+ let_it_be(:environment) { prometheus_alert.environment }
- let(:metrics_url) do
- metrics_project_environments_url(project)
- end
+ let(:alert_attributes) { build(:alert_management_alert, :prometheus, :from_payload, payload: payload, project: project).attributes }
+ let(:title) { "#{prometheus_alert.title} #{prometheus_alert.computed_operator} #{prometheus_alert.threshold}" }
+ let(:metrics_url) { metrics_project_environment_url(project, environment) }
before do
- alert_params['annotations'] = { 'title' => title }
- alert_params['generatorURL'] = 'http://localhost:9090/graph?g0.expr=vector%281%29&g0.tab=1'
+ payload['labels'] = {
+ 'gitlab_alert_id' => prometheus_alert.prometheus_metric_id,
+ 'alertname' => prometheus_alert.title
+ }
end
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'shows the incident issues url'
it 'has expected subject' do
- is_expected.to have_subject("#{project.name} | Alert: #{title}")
+ is_expected.to have_subject("#{project.name} | Alert: #{environment.name}: #{title} for 5 minutes")
end
it 'has expected content' do
is_expected.to have_body_text('An alert has been triggered')
is_expected.to have_body_text(project.full_path)
+ is_expected.to have_body_text('Environment:')
+ is_expected.to have_body_text(environment.name)
+ is_expected.to have_body_text('Metric:')
+ is_expected.to have_body_text(prometheus_alert.full_query)
+ is_expected.to have_body_text(metrics_url)
is_expected.not_to have_body_text('Description:')
- is_expected.not_to have_body_text('Environment:')
end
-
- context 'with annotated description' do
- let(:description) { 'description' }
-
- before do
- alert_params['annotations']['description'] = description
- end
-
- it 'shows the description' do
- is_expected.to have_body_text('Description:')
- is_expected.to have_body_text(description)
- end
- end
-
- it_behaves_like 'shows the incident issues url'
end
end
end
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index b9f95a9eb00..fa03c374633 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1508,12 +1508,44 @@ RSpec.describe Notify do
)
end
- describe 'group invitation' do
+ describe 'invitations' do
let(:owner) { create(:user).tap { |u| group.add_user(u, Gitlab::Access::OWNER) } }
let(:group_member) { invite_to_group(group, inviter: inviter) }
let(:inviter) { owner }
- subject { described_class.member_invited_email('group', group_member.id, group_member.invite_token) }
+ subject { described_class.member_invited_email('Group', group_member.id, group_member.invite_token) }
+
+ shared_examples "tracks the 'sent' event for the invitation reminders experiment" do
+ before do
+ stub_experiment(invitation_reminders: true)
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).with(:invitation_reminders, group_member.invite_email).and_return(experimental_group)
+ end
+
+ it "tracks the 'sent' event", :snowplow do
+ subject.deliver_now
+
+ expect_snowplow_event(
+ category: 'Growth::Acquisition::Experiment::InvitationReminders',
+ label: Digest::MD5.hexdigest(group_member.to_global_id.to_s),
+ property: experimental_group ? 'experimental_group' : 'control_group',
+ action: 'sent'
+ )
+ end
+ end
+
+ describe 'tracking for the invitation reminders experiment' do
+ context 'when invite email is in the experimental group' do
+ let(:experimental_group) { true }
+
+ it_behaves_like "tracks the 'sent' event for the invitation reminders experiment"
+ end
+
+ context 'when invite email is in the control group' do
+ let(:experimental_group) { false }
+
+ it_behaves_like "tracks the 'sent' event for the invitation reminders experiment"
+ end
+ end
context 'when invite_email_experiment is disabled' do
before do
diff --git a/spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb b/spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb
new file mode 100644
index 00000000000..018d48bea66
--- /dev/null
+++ b/spec/migrations/add_partial_index_to_ci_builds_table_on_user_id_name_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200908064229_add_partial_index_to_ci_builds_table_on_user_id_name.rb')
+
+RSpec.describe AddPartialIndexToCiBuildsTableOnUserIdName do
+ let(:migration) { described_class.new }
+
+ describe '#up' do
+ it 'creates temporary partial index on type' do
+ expect { migration.up }.to change { migration.index_exists?(:ci_builds, [:user_id, :name], name: described_class::INDEX_NAME) }.from(false).to(true)
+ end
+ end
+
+ describe '#down' do
+ it 'removes temporary partial index on type' do
+ migration.up
+
+ expect { migration.down }.to change { migration.index_exists?(:ci_builds, [:user_id, :name], name: described_class::INDEX_NAME) }.from(true).to(false)
+ end
+ end
+end
diff --git a/spec/migrations/backfill_status_page_published_incidents_spec.rb b/spec/migrations/backfill_status_page_published_incidents_spec.rb
index 2b1ab891038..674484cdf0a 100644
--- a/spec/migrations/backfill_status_page_published_incidents_spec.rb
+++ b/spec/migrations/backfill_status_page_published_incidents_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe BackfillStatusPagePublishedIncidents, :migration do
end
it 'creates a StatusPage::PublishedIncident record for each published issue' do
- Timecop.freeze(current_time) do
+ travel_to(current_time) do
expect(incidents.all).to be_empty
migrate!
diff --git a/spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb b/spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb
new file mode 100644
index 00000000000..f9285c857de
--- /dev/null
+++ b/spec/migrations/cleanup_group_import_states_with_null_user_id_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+# In order to test the CleanupGroupImportStatesWithNullUserId migration, we need
+# to first create a GroupImportState with a NULL user_id
+# and then run the migration to check that user_id was populated or the record was removed.
+#
+# The problem is that the CleanupGroupImportStatesWithNullUserId migration comes
+# after the NOT NULL constraint was added by a previous migration (AddNotNullConstraintToUserOnGroupImportStates).
+# That means that while testing the current class we cannot insert GroupImportState records with an
+# invalid user_id, because the constraint blocks it.
+#
+# To solve this problem, use SchemaVersionFinder to set the schema to one version prior to AddNotNullConstraintToUserOnGroupImportStates.
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200907092715_add_not_null_constraint_to_user_on_group_import_states.rb')
+require Rails.root.join('db', 'post_migrate', '20200909161624_cleanup_group_import_states_with_null_user_id.rb')
+
+RSpec.describe CleanupGroupImportStatesWithNullUserId, :migration,
+ schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraintToUserOnGroupImportStates) do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:users_table) { table(:users) }
+ let(:group_import_states_table) { table(:group_import_states) }
+ let(:members_table) { table(:members) }
+
+ describe 'Group import states clean up' do
+ context 'when user_id is present' do
+ it 'does not update group_import_state record' do
+ user_1 = users_table.create!(name: 'user1', email: 'user1@example.com', projects_limit: 1)
+ group_1 = namespaces_table.create!(name: 'group_1', path: 'group_1', type: 'Group')
+ create_member(user_id: user_1.id, type: 'GroupMember', source_type: 'Namespace', source_id: group_1.id, access_level: described_class::Group::OWNER)
+ group_import_state_1 = group_import_states_table.create!(group_id: group_1.id, user_id: user_1.id, status: 0)
+
+ expect(group_import_state_1.user_id).to eq(user_1.id)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state_1.reload.user_id).to eq(user_1.id)
+ end
+ end
+
+ context 'when user_id is missing' do
+ it 'updates user_id with group default owner id' do
+ user_2 = users_table.create!(name: 'user2', email: 'user2@example.com', projects_limit: 1)
+ group_2 = namespaces_table.create!(name: 'group_2', path: 'group_2', type: 'Group')
+ create_member(user_id: user_2.id, type: 'GroupMember', source_type: 'Namespace', source_id: group_2.id, access_level: described_class::Group::OWNER)
+ group_import_state_2 = group_import_states_table.create!(group_id: group_2.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state_2.reload.user_id).to eq(user_2.id)
+ end
+ end
+
+ context 'when group does not contain any owners' do
+ it 'removes group_import_state record' do
+ group_3 = namespaces_table.create!(name: 'group_3', path: 'group_3', type: 'Group')
+ group_import_state_3 = group_import_states_table.create!(group_id: group_3.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect { group_import_state_3.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'when group has parent' do
+ it 'updates user_id with parent group default owner id' do
+ user = users_table.create!(name: 'user4', email: 'user4@example.com', projects_limit: 1)
+ group_1 = namespaces_table.create!(name: 'group_1', path: 'group_1', type: 'Group')
+ create_member(user_id: user.id, type: 'GroupMember', source_type: 'Namespace', source_id: group_1.id, access_level: described_class::Group::OWNER)
+ group_2 = namespaces_table.create!(name: 'group_2', path: 'group_2', type: 'Group', parent_id: group_1.id)
+ group_import_state = group_import_states_table.create!(group_id: group_2.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state.reload.user_id).to eq(user.id)
+ end
+ end
+
+ context 'when group has owner_id' do
+ it 'updates user_id with owner_id' do
+ user = users_table.create!(name: 'user', email: 'user@example.com', projects_limit: 1)
+ group = namespaces_table.create!(name: 'group', path: 'group', type: 'Group', owner_id: user.id)
+ group_import_state = group_import_states_table.create!(group_id: group.id, user_id: nil, status: 0)
+
+ disable_migrations_output { migrate! }
+
+ expect(group_import_state.reload.user_id).to eq(user.id)
+ end
+ end
+ end
+
+ def create_member(options)
+ members_table.create!(
+ {
+ notification_level: 0,
+ ldap: false,
+ override: false
+ }.merge(options)
+ )
+ end
+end
diff --git a/spec/migrations/ensure_filled_file_store_on_package_files_spec.rb b/spec/migrations/ensure_filled_file_store_on_package_files_spec.rb
new file mode 100644
index 00000000000..8a0f51ab27e
--- /dev/null
+++ b/spec/migrations/ensure_filled_file_store_on_package_files_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200915185707_ensure_filled_file_store_on_package_files.rb')
+
+RSpec.describe EnsureFilledFileStoreOnPackageFiles, schema: 20200910175553 do
+ let!(:packages_package_files) { table(:packages_package_files) }
+ let!(:packages_packages) { table(:packages_packages) }
+ let!(:namespaces) { table(:namespaces) }
+ let!(:projects) { table(:projects) }
+ let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:package) { packages_packages.create!(project_id: project.id, name: 'bar', package_type: 1) }
+
+ before do
+ constraint_name = 'check_4c5e6bb0b3'
+
+ # Drop the check constraint so that we can insert a row with a NULL file_store to backfill.
+ ActiveRecord::Base.connection.execute "ALTER TABLE packages_package_files DROP CONSTRAINT #{constraint_name}"
+
+ @file_store_1 = packages_package_files.create!(file_store: 1, file_name: 'foo_1', file: 'foo_1', package_id: package.id)
+ @file_store_2 = packages_package_files.create!(file_store: 2, file_name: 'foo_2', file: 'foo_2', package_id: package.id)
+ @file_store_nil = packages_package_files.create!(file_store: nil, file_name: 'foo_nil', file: 'foo_nil', package_id: package.id)
+
+ # Restore the constraint. NOT VALID skips validation of existing rows, so the NULL row above survives.
+ ActiveRecord::Base.connection.execute "ALTER TABLE packages_package_files ADD CONSTRAINT #{constraint_name} CHECK ((file_store IS NOT NULL)) NOT VALID"
+ end
+
+ it 'correctly migrates nil file_store to 1' do
+ migrate!
+
+ @file_store_1.reload
+ @file_store_2.reload
+ @file_store_nil.reload
+
+ expect(@file_store_1.file_store).to eq(1) # unchanged
+ expect(@file_store_2.file_store).to eq(2) # unchanged
+ expect(@file_store_nil.file_store).to eq(1) # nil => 1
+ end
+end
diff --git a/spec/models/alert_management/http_integration_spec.rb b/spec/models/alert_management/http_integration_spec.rb
new file mode 100644
index 00000000000..37d67dfe09a
--- /dev/null
+++ b/spec/models/alert_management/http_integration_spec.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AlertManagement::HttpIntegration do
+ let_it_be(:project) { create(:project) }
+
+ subject(:integration) { build(:alert_management_http_integration) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ it { is_expected.to validate_presence_of(:endpoint_identifier) }
+ it { is_expected.to validate_length_of(:endpoint_identifier).is_at_most(255) }
+
+ context 'when active' do
+ # Use `create` instead of `build` so the integration's `token` is set.
+ # Otherwise the uniqueness matcher saves the integration with `validate: false`.
+ subject { create(:alert_management_http_integration) }
+
+ it { is_expected.to validate_uniqueness_of(:endpoint_identifier).scoped_to(:project_id, :active) }
+ end
+
+ context 'when inactive' do
+ subject { create(:alert_management_http_integration, :inactive) }
+
+ it { is_expected.not_to validate_uniqueness_of(:endpoint_identifier).scoped_to(:project_id, :active) }
+ end
+ end
+
+ describe '#token' do
+ subject { integration.token }
+
+ shared_context 'assign token' do |token|
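+ # `let!` captures the current token before the `before` block below reassigns it,
+ # so examples can compare the old and new values.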
+ let!(:previous_token) { integration.token }
+
+ before do
+ integration.token = token
+ integration.valid?
+ end
+ end
+
+ shared_examples 'valid token' do
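+ # A valid token is 32 hexadecimal characters (the format suggests something like
+ # SecureRandom.hex(16), though that is an assumption about the implementation).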
+ it { is_expected.to match(/\A\h{32}\z/) }
+ end
+
+ context 'when unsaved' do
+ context 'when unassigned' do
+ before do
+ integration.valid?
+ end
+
+ it_behaves_like 'valid token'
+ end
+
+ context 'when assigned' do
+ include_context 'assign token', 'random_token'
+
+ it_behaves_like 'valid token'
+ it { is_expected.not_to eq('random_token') }
+ end
+ end
+
+ context 'when persisted' do
+ before do
+ integration.save!
+ integration.reload
+ end
+
+ it_behaves_like 'valid token'
+
+ context 'when resetting' do
+ include_context 'assign token', ''
+
+ it_behaves_like 'valid token'
+ it { is_expected.not_to eq(previous_token) }
+ end
+
+ context 'when reassigning' do
+ include_context 'assign token', 'random_token'
+
+ it_behaves_like 'valid token'
+ it { is_expected.to eq(previous_token) }
+ end
+ end
+ end
+end
diff --git a/spec/models/analytics/instance_statistics/measurement_spec.rb b/spec/models/analytics/instance_statistics/measurement_spec.rb
index 4df847ea524..379272cfcb9 100644
--- a/spec/models/analytics/instance_statistics/measurement_spec.rb
+++ b/spec/models/analytics/instance_statistics/measurement_spec.rb
@@ -20,7 +20,11 @@ RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do
issues: 3,
merge_requests: 4,
groups: 5,
- pipelines: 6
+ pipelines: 6,
+ pipelines_succeeded: 7,
+ pipelines_failed: 8,
+ pipelines_canceled: 9,
+ pipelines_skipped: 10
}.with_indifferent_access)
end
end
@@ -42,4 +46,28 @@ RSpec.describe Analytics::InstanceStatistics::Measurement, type: :model do
it { is_expected.to match_array([measurement_1, measurement_2]) }
end
end
+
+ describe '.measurement_identifier_values' do
+ subject { described_class.measurement_identifier_values.count }
+
+ context 'when the `store_ci_pipeline_counts_by_status` feature flag is off' do
+ let(:expected_count) { Analytics::InstanceStatistics::Measurement.identifiers.size - Analytics::InstanceStatistics::Measurement::EXPERIMENTAL_IDENTIFIERS.size }
+
+ before do
+ stub_feature_flags(store_ci_pipeline_counts_by_status: false)
+ end
+
+ it { is_expected.to eq(expected_count) }
+ end
+
+ context 'when the `store_ci_pipeline_counts_by_status` feature flag is on' do
+ let(:expected_count) { Analytics::InstanceStatistics::Measurement.identifiers.size }
+
+ before do
+ stub_feature_flags(store_ci_pipeline_counts_by_status: true)
+ end
+
+ it { is_expected.to eq(expected_count) }
+ end
+ end
end
diff --git a/spec/models/application_setting/term_spec.rb b/spec/models/application_setting/term_spec.rb
index 82347453437..51a6027698f 100644
--- a/spec/models/application_setting/term_spec.rb
+++ b/spec/models/application_setting/term_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe ApplicationSetting::Term do
describe '#accepted_by_user?' do
let(:user) { create(:user) }
+ let(:project_bot) { create(:user, :project_bot) }
let(:term) { create(:term) }
it 'is true when the user accepted the terms' do
@@ -25,6 +26,10 @@ RSpec.describe ApplicationSetting::Term do
expect(term.accepted_by_user?(user)).to be(true)
end
+ it 'is true when the user is a bot' do
+ expect(term.accepted_by_user?(project_bot)).to be(true)
+ end
+
it 'is false when the user declined the terms' do
decline_terms(term, user)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 9f76fb3330d..f572e6ffade 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -320,7 +320,7 @@ RSpec.describe ApplicationSetting do
end
end
- it_behaves_like 'an object with email-formated attributes', :admin_notification_email do
+ it_behaves_like 'an object with email-formated attributes', :abuse_notification_email do
subject { setting }
end
diff --git a/spec/models/audit_event_spec.rb b/spec/models/audit_event_spec.rb
index a1ed48c57f4..5c87c2e68db 100644
--- a/spec/models/audit_event_spec.rb
+++ b/spec/models/audit_event_spec.rb
@@ -6,6 +6,13 @@ RSpec.describe AuditEvent do
let_it_be(:audit_event) { create(:project_audit_event) }
subject { audit_event }
+ describe 'validations' do
+ include_examples 'validates IP address' do
+ let(:attribute) { :ip_address }
+ let(:object) { create(:audit_event) }
+ end
+ end
+
describe '#as_json' do
context 'ip_address' do
subject { build(:group_audit_event, ip_address: '192.168.1.1').as_json }
diff --git a/spec/models/authentication_event_spec.rb b/spec/models/authentication_event_spec.rb
index 56b0111f2c7..483d45c08be 100644
--- a/spec/models/authentication_event_spec.rb
+++ b/spec/models/authentication_event_spec.rb
@@ -11,5 +11,41 @@ RSpec.describe AuthenticationEvent do
it { is_expected.to validate_presence_of(:provider) }
it { is_expected.to validate_presence_of(:user_name) }
it { is_expected.to validate_presence_of(:result) }
+
+ include_examples 'validates IP address' do
+ let(:attribute) { :ip_address }
+ let(:object) { create(:authentication_event) }
+ end
+ end
+
+ describe 'scopes' do
+ let_it_be(:ldap_event) { create(:authentication_event, provider: :ldapmain, result: :failed) }
+ let_it_be(:google_oauth2) { create(:authentication_event, provider: :google_oauth2, result: :success) }
+
+ describe '.for_provider' do
+ it 'returns events only for the specified provider' do
+ expect(described_class.for_provider(:ldapmain)).to match_array ldap_event
+ end
+ end
+
+ describe '.ldap' do
+ it 'returns all events for an LDAP provider' do
+ expect(described_class.ldap).to match_array ldap_event
+ end
+ end
+ end
+
+ describe '.providers' do
+ before do
+ create(:authentication_event, provider: :ldapmain)
+ create(:authentication_event, provider: :google_oauth2)
+ create(:authentication_event, provider: :standard)
+ create(:authentication_event, provider: :standard)
+ create(:authentication_event, provider: :standard)
+ end
+
+ it 'returns an array of distinct providers' do
+ expect(described_class.providers).to match_array %w(ldapmain google_oauth2 standard)
+ end
end
end
diff --git a/spec/models/ci/build_pending_state_spec.rb b/spec/models/ci/build_pending_state_spec.rb
new file mode 100644
index 00000000000..a546d2aff65
--- /dev/null
+++ b/spec/models/ci/build_pending_state_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::BuildPendingState do
+ describe '#crc32' do
+ context 'when checksum does not exist' do
+ let(:pending_state) do
+ build(:ci_build_pending_state, trace_checksum: nil)
+ end
+
+ it 'returns nil' do
+ expect(pending_state.crc32).to be_nil
+ end
+ end
+
+ context 'when checksum is in hexadecimal' do
+ let(:pending_state) do
+ build(:ci_build_pending_state, trace_checksum: 'crc32:75bcd15')
+ end
+
+ it 'returns decimal representation of the checksum' do
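+ # 'crc32:75bcd15' carries the checksum in hexadecimal; 0x75bcd15 is 123456789 in decimal.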
+ expect(pending_state.crc32).to eq 123456789
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 1e551d9ee33..cb29cbcbb72 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -4652,4 +4652,24 @@ RSpec.describe Ci::Build do
it { is_expected.to be_nil }
end
end
+
+ describe '#run_on_status_commit' do
+ it 'runs provided hook after status commit' do
+ action = spy('action')
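+ # `spy` builds a test double that records every message it receives, so we can
+ # assert below that the hook fired exactly once.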
+
+ build.run_on_status_commit { action.perform! }
+ build.success!
+
+ expect(action).to have_received(:perform!).once
+ end
+
+ it 'does not run hooks when status has not changed' do
+ action = spy('action')
+
+ build.run_on_status_commit { action.perform! }
+ build.save!
+
+ expect(action).not_to have_received(:perform!)
+ end
+ end
end
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index fefe5e3bfca..57e58fe494f 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -779,4 +779,62 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
end
+
+ describe 'comparable build trace chunks' do
+ describe '#<=>' do
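+ # <=> is defined only between chunks of the same build; within a build it orders chunks by chunk_index.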
+ context 'when chunks are associated with different builds' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
+
+ it 'returns nil' do
+ expect(first <=> second).to be_nil
+ end
+ end
+
+ context 'when there are two chunks with different indexes' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
+
+ it 'indicates that the first one is greater than the second' do
+ expect(first <=> second).to eq 1
+ end
+ end
+
+ context 'when there are two chunks with the same index within the same build' do
+ let(:chunk) { create(:ci_build_trace_chunk) }
+
+ it 'indicates that these are equal' do
+ expect(chunk <=> chunk).to be_zero # rubocop:disable Lint/UselessComparison
+ end
+ end
+ end
+
+ describe '#==' do
+ context 'when chunks have the same index' do
+ let(:chunk) { create(:ci_build_trace_chunk) }
+
+ it 'indicates that the chunks are equal' do
+ expect(chunk).to eq chunk
+ end
+ end
+
+ context 'when chunks have different indexes' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, build: build, chunk_index: 0) }
+
+ it 'indicates that the chunks are not equal' do
+ expect(first).not_to eq second
+ end
+ end
+
+ context 'when chunks are associated with different builds' do
+ let(:first) { create(:ci_build_trace_chunk, build: build, chunk_index: 1) }
+ let(:second) { create(:ci_build_trace_chunk, chunk_index: 1) }
+
+ it 'indicates that the chunks are not equal' do
+ expect(first).not_to eq second
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/ci/freeze_period_status_spec.rb b/spec/models/ci/freeze_period_status_spec.rb
index 831895cb528..f51381f7a5f 100644
--- a/spec/models/ci/freeze_period_status_spec.rb
+++ b/spec/models/ci/freeze_period_status_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::FreezePeriodStatus do
shared_examples 'within freeze period' do |time|
it 'is frozen' do
- Timecop.freeze(time) do
+ travel_to(time) do
expect(subject).to be_truthy
end
end
@@ -19,7 +19,7 @@ RSpec.describe Ci::FreezePeriodStatus do
shared_examples 'outside freeze period' do |time|
it 'is not frozen' do
- Timecop.freeze(time) do
+ travel_to(time) do
expect(subject).to be_falsy
end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 779839df670..26851c93ac3 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Ci::JobArtifact do
it_behaves_like 'having unique enum values'
- it_behaves_like 'UpdateProjectStatistics' do
+ it_behaves_like 'UpdateProjectStatistics', :with_counter_attribute do
let_it_be(:job, reload: true) { create(:ci_build) }
subject { build(:ci_job_artifact, :archive, job: job, size: 107464) }
@@ -44,7 +44,7 @@ RSpec.describe Ci::JobArtifact do
let!(:metrics_report) { create(:ci_job_artifact, :junit) }
let!(:codequality_report) { create(:ci_job_artifact, :codequality) }
- it { is_expected.to eq([metrics_report, codequality_report]) }
+ it { is_expected.to match_array([metrics_report, codequality_report]) }
end
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 949d5f7bd04..cec3b544e50 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe Ci::PipelineSchedule do
subject { described_class.runnable_schedules }
let!(:pipeline_schedule) do
- Timecop.freeze(1.day.ago) do
+ travel_to(1.day.ago) do
create(:ci_pipeline_schedule, :hourly)
end
end
@@ -118,7 +118,7 @@ RSpec.describe Ci::PipelineSchedule do
let(:pipeline_schedule) { create(:ci_pipeline_schedule, :every_minute) }
it "updates next_run_at to the sidekiq worker's execution time" do
- Timecop.freeze(Time.zone.parse("2019-06-01 12:18:00+0000")) do
+ travel_to(Time.zone.parse("2019-06-01 12:18:00+0000")) do
expect(pipeline_schedule.next_run_at).to eq(cron_worker_next_run_at)
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 228a1e8f7a2..d33ccf0e6f2 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2436,7 +2436,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#retry_failed' do
- let(:latest_status) { pipeline.statuses.latest.pluck(:status) }
+ let(:latest_status) { pipeline.latest_statuses.pluck(:status) }
before do
stub_not_protect_default_branch
@@ -3628,6 +3628,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(builds).to include(rspec, jest)
expect(builds).not_to include(karma)
end
+
+ it 'returns only latest builds' do
+ obsolete = create(:ci_build, name: "jest", coverage: 10.12, pipeline: pipeline, retried: true)
+ retried = create(:ci_build, name: "jest", coverage: 20.11, pipeline: pipeline)
+
+ builds = pipeline.builds_with_coverage
+
+ expect(builds).to include(retried)
+ expect(builds).not_to include(obsolete)
+ end
end
describe '#base_and_ancestors' do
diff --git a/spec/models/ci_platform_metric_spec.rb b/spec/models/ci_platform_metric_spec.rb
index 0b00875df43..f73db713791 100644
--- a/spec/models/ci_platform_metric_spec.rb
+++ b/spec/models/ci_platform_metric_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe CiPlatformMetric do
let(:tomorrow) { today + 1.day }
it 'inserts platform target counts for that day' do
- Timecop.freeze(today) do
+ travel_to(today) do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'ECS')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'ECS')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FARGATE')
@@ -53,7 +53,7 @@ RSpec.describe CiPlatformMetric do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FARGATE')
described_class.insert_auto_devops_platform_targets!
end
- Timecop.freeze(tomorrow) do
+ travel_to(tomorrow) do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FARGATE')
described_class.insert_auto_devops_platform_targets!
end
@@ -69,7 +69,7 @@ RSpec.describe CiPlatformMetric do
let(:today) { Time.zone.local(1982, 4, 24) }
it 'ignores those values' do
- Timecop.freeze(today) do
+ travel_to(today) do
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'ECS')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'FOO')
create(:ci_variable, key: described_class::CI_VARIABLE_KEY, value: 'BAR')
diff --git a/spec/models/clusters/applications/runner_spec.rb b/spec/models/clusters/applications/runner_spec.rb
index fbabfd25b2f..ef916c73e0b 100644
--- a/spec/models/clusters/applications/runner_spec.rb
+++ b/spec/models/clusters/applications/runner_spec.rb
@@ -69,8 +69,8 @@ RSpec.describe Clusters::Applications::Runner do
expect(values).to include('privileged: true')
expect(values).to include('image: ubuntu:16.04')
expect(values).to include('resources')
- expect(values).to match(/runnerToken: '?#{Regexp.escape(ci_runner.token)}/)
- expect(values).to match(/gitlabUrl: '?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/)
+ expect(values).to match(/runnerToken: ['"]?#{Regexp.escape(ci_runner.token)}/)
+ expect(values).to match(/gitlabUrl: ['"]?#{Regexp.escape(Gitlab::Routing.url_helpers.root_url)}/)
end
context 'without a runner' do
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 024539e34ec..dd9b96f39ad 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to delegate_method(:external_hostname).to(:application_ingress).with_prefix }
it { is_expected.to respond_to :project }
+ it { is_expected.to be_namespace_per_environment }
describe 'applications have inverse_of: :cluster option' do
let(:cluster) { create(:cluster) }
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 6e23f95af03..877188097fd 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -493,47 +493,104 @@ RSpec.describe CommitStatus do
end
end
- describe '#group_name' do
- let(:commit_status) do
- build(:commit_status, pipeline: pipeline, stage: 'test')
- end
-
- subject { commit_status.group_name }
+ context 'with the one_dimensional_matrix feature flag disabled' do
+ describe '#group_name' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: false)
+ end
- tests = {
- 'rspec:windows' => 'rspec:windows',
- 'rspec:windows 0' => 'rspec:windows 0',
- 'rspec:windows 0 test' => 'rspec:windows 0 test',
- 'rspec:windows 0 1' => 'rspec:windows',
- 'rspec:windows 0 1 name' => 'rspec:windows name',
- 'rspec:windows 0/1' => 'rspec:windows',
- 'rspec:windows 0/1 name' => 'rspec:windows name',
- 'rspec:windows 0:1' => 'rspec:windows',
- 'rspec:windows 0:1 name' => 'rspec:windows name',
- 'rspec:windows 10000 20000' => 'rspec:windows',
- 'rspec:windows 0 : / 1' => 'rspec:windows',
- 'rspec:windows 0 : / 1 name' => 'rspec:windows name',
- '0 1 name ruby' => 'name ruby',
- '0 :/ 1 name ruby' => 'name ruby',
- 'rspec: [aws]' => 'rspec: [aws]',
- 'rspec: [aws] 0/1' => 'rspec: [aws]',
- 'rspec: [aws, max memory]' => 'rspec',
- 'rspec:linux: [aws, max memory, data]' => 'rspec:linux',
- 'rspec: [inception: [something, other thing], value]' => 'rspec',
- 'rspec:windows 0/1: [name, other]' => 'rspec:windows',
- 'rspec:windows: [name, other] 0/1' => 'rspec:windows',
- 'rspec:windows: [name, 0/1] 0/1' => 'rspec:windows',
- 'rspec:windows: [0/1, name]' => 'rspec:windows',
- 'rspec:windows: [, ]' => 'rspec:windows',
- 'rspec:windows: [name]' => 'rspec:windows: [name]',
- 'rspec:windows: [name,other]' => 'rspec:windows: [name,other]'
- }
+ let(:commit_status) do
+ build(:commit_status, pipeline: pipeline, stage: 'test')
+ end
+
+ subject { commit_status.group_name }
+
+ tests = {
+ 'rspec:windows' => 'rspec:windows',
+ 'rspec:windows 0' => 'rspec:windows 0',
+ 'rspec:windows 0 test' => 'rspec:windows 0 test',
+ 'rspec:windows 0 1' => 'rspec:windows',
+ 'rspec:windows 0 1 name' => 'rspec:windows name',
+ 'rspec:windows 0/1' => 'rspec:windows',
+ 'rspec:windows 0/1 name' => 'rspec:windows name',
+ 'rspec:windows 0:1' => 'rspec:windows',
+ 'rspec:windows 0:1 name' => 'rspec:windows name',
+ 'rspec:windows 10000 20000' => 'rspec:windows',
+ 'rspec:windows 0 : / 1' => 'rspec:windows',
+ 'rspec:windows 0 : / 1 name' => 'rspec:windows name',
+ '0 1 name ruby' => 'name ruby',
+ '0 :/ 1 name ruby' => 'name ruby',
+ 'rspec: [aws]' => 'rspec: [aws]',
+ 'rspec: [aws] 0/1' => 'rspec: [aws]',
+ 'rspec: [aws, max memory]' => 'rspec',
+ 'rspec:linux: [aws, max memory, data]' => 'rspec:linux',
+ 'rspec: [inception: [something, other thing], value]' => 'rspec',
+ 'rspec:windows 0/1: [name, other]' => 'rspec:windows',
+ 'rspec:windows: [name, other] 0/1' => 'rspec:windows',
+ 'rspec:windows: [name, 0/1] 0/1' => 'rspec:windows',
+ 'rspec:windows: [0/1, name]' => 'rspec:windows',
+ 'rspec:windows: [, ]' => 'rspec:windows',
+ 'rspec:windows: [name]' => 'rspec:windows: [name]',
+ 'rspec:windows: [name,other]' => 'rspec:windows: [name,other]'
+ }
+
+ tests.each do |name, group_name|
+ it "'#{name}' puts in '#{group_name}'" do
+ commit_status.name = name
+
+ is_expected.to eq(group_name)
+ end
+ end
+ end
+ end
- tests.each do |name, group_name|
- it "'#{name}' puts in '#{group_name}'" do
- commit_status.name = name
+ context 'with the one_dimensional_matrix feature flag enabled' do
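+ # With the flag enabled, one-dimensional matrix names such as 'rspec: [aws]' are
+ # grouped under the plain job name ('rspec'); compare the table in the disabled context above.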
+ describe '#group_name' do
+ before do
+ stub_feature_flags(one_dimensional_matrix: true)
+ end
- is_expected.to eq(group_name)
+ let(:commit_status) do
+ build(:commit_status, pipeline: pipeline, stage: 'test')
+ end
+
+ subject { commit_status.group_name }
+
+ tests = {
+ 'rspec:windows' => 'rspec:windows',
+ 'rspec:windows 0' => 'rspec:windows 0',
+ 'rspec:windows 0 test' => 'rspec:windows 0 test',
+ 'rspec:windows 0 1' => 'rspec:windows',
+ 'rspec:windows 0 1 name' => 'rspec:windows name',
+ 'rspec:windows 0/1' => 'rspec:windows',
+ 'rspec:windows 0/1 name' => 'rspec:windows name',
+ 'rspec:windows 0:1' => 'rspec:windows',
+ 'rspec:windows 0:1 name' => 'rspec:windows name',
+ 'rspec:windows 10000 20000' => 'rspec:windows',
+ 'rspec:windows 0 : / 1' => 'rspec:windows',
+ 'rspec:windows 0 : / 1 name' => 'rspec:windows name',
+ '0 1 name ruby' => 'name ruby',
+ '0 :/ 1 name ruby' => 'name ruby',
+ 'rspec: [aws]' => 'rspec',
+ 'rspec: [aws] 0/1' => 'rspec',
+ 'rspec: [aws, max memory]' => 'rspec',
+ 'rspec:linux: [aws, max memory, data]' => 'rspec:linux',
+ 'rspec: [inception: [something, other thing], value]' => 'rspec',
+ 'rspec:windows 0/1: [name, other]' => 'rspec:windows',
+ 'rspec:windows: [name, other] 0/1' => 'rspec:windows',
+ 'rspec:windows: [name, 0/1] 0/1' => 'rspec:windows',
+ 'rspec:windows: [0/1, name]' => 'rspec:windows',
+ 'rspec:windows: [, ]' => 'rspec:windows',
+ 'rspec:windows: [name]' => 'rspec:windows',
+ 'rspec:windows: [name,other]' => 'rspec:windows'
+ }
+
+ tests.each do |name, group_name|
+ it "'#{name}' puts in '#{group_name}'" do
+ commit_status.name = name
+
+ is_expected.to eq(group_name)
+ end
end
end
end
diff --git a/spec/models/concerns/avatarable_spec.rb b/spec/models/concerns/avatarable_spec.rb
index 8a8eeea39dc..5bed2cb9a14 100644
--- a/spec/models/concerns/avatarable_spec.rb
+++ b/spec/models/concerns/avatarable_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Avatarable do
it 'validates the file size' do
expect(validator).to receive(:validate_each).and_call_original
- project.update(avatar: 'uploads/avatar.png')
+ project.update!(avatar: 'uploads/avatar.png')
end
end
@@ -29,7 +29,7 @@ RSpec.describe Avatarable do
it 'skips validation of file size' do
expect(validator).not_to receive(:validate_each)
- project.update(name: 'Hello world')
+ project.update!(name: 'Hello world')
end
end
end
diff --git a/spec/models/concerns/bulk_insertable_associations_spec.rb b/spec/models/concerns/bulk_insertable_associations_spec.rb
index 5a40639e493..25b13c8233d 100644
--- a/spec/models/concerns/bulk_insertable_associations_spec.rb
+++ b/spec/models/concerns/bulk_insertable_associations_spec.rb
@@ -187,7 +187,7 @@ RSpec.describe BulkInsertableAssociations do
it 'invalidates the parent and returns false' do
build_invalid_items(parent: parent)
- expect(save_with_bulk_inserts(parent, bangify: false)).to be false
+ expect(BulkInsertableAssociations.with_bulk_insert { parent.save }).to be false # rubocop:disable Rails/SaveBang
expect(parent.errors[:bulk_foos].size).to eq(1)
expect(BulkFoo.count).to eq(0)
@@ -211,8 +211,8 @@ RSpec.describe BulkInsertableAssociations do
private
- def save_with_bulk_inserts(entity, bangify: true)
- BulkInsertableAssociations.with_bulk_insert { bangify ? entity.save! : entity.save }
+ def save_with_bulk_inserts(entity)
+ BulkInsertableAssociations.with_bulk_insert { entity.save! }
end
def build_items(parent:, relation: :bulk_foos, count: 10)
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 440943171c3..37e2f5fb8d4 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -285,7 +285,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it_behaves_like 'a class with cached markdown fields'
describe '#attribute_invalidated?' do
- let(:thing) { klass.create(description: markdown, description_html: html, cached_markdown_version: cache_version) }
+ let(:thing) { klass.create!(description: markdown, description_html: html, cached_markdown_version: cache_version) }
it 'returns true when cached_markdown_version is different' do
thing.cached_markdown_version += 1
@@ -318,7 +318,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
let(:thing) do
# This forces the record to have outdated HTML. We can't use `create` because the `before_create` hook
# would re-render the HTML to the latest version
- klass.create.tap do |thing|
+ klass.create!.tap do |thing|
thing.update_columns(description: markdown, description_html: old_html, cached_markdown_version: old_version)
end
end
@@ -326,7 +326,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
it 'correctly updates cached HTML even if refresh_markdown_cache is called before updating the attribute' do
thing.refresh_markdown_cache
- thing.update(description: updated_markdown)
+ thing.update!(description: updated_markdown)
expect(thing.description_html).to eq(updated_html)
end
diff --git a/spec/models/concerns/case_sensitivity_spec.rb b/spec/models/concerns/case_sensitivity_spec.rb
index 521b47c63fd..5fb7cdb4443 100644
--- a/spec/models/concerns/case_sensitivity_spec.rb
+++ b/spec/models/concerns/case_sensitivity_spec.rb
@@ -12,8 +12,8 @@ RSpec.describe CaseSensitivity do
end
end
- let!(:model_1) { model.create(path: 'mOdEl-1', name: 'mOdEl 1') }
- let!(:model_2) { model.create(path: 'mOdEl-2', name: 'mOdEl 2') }
+ let!(:model_1) { model.create!(path: 'mOdEl-1', name: 'mOdEl 1') }
+ let!(:model_2) { model.create!(path: 'mOdEl-2', name: 'mOdEl 2') }
it 'finds a single instance by a single attribute regardless of case' do
expect(model.iwhere(path: 'MODEL-1')).to contain_exactly(model_1)
diff --git a/spec/models/concerns/checksummable_spec.rb b/spec/models/concerns/checksummable_spec.rb
index b469b2e5c18..3a0387333e8 100644
--- a/spec/models/concerns/checksummable_spec.rb
+++ b/spec/models/concerns/checksummable_spec.rb
@@ -3,17 +3,21 @@
require 'spec_helper'
RSpec.describe Checksummable do
- describe ".hexdigest" do
- let(:fake_class) do
- Class.new do
- include Checksummable
- end
+ subject do
+ Class.new { include Checksummable }
+ end
+
+ describe ".crc32" do
+ it 'returns the CRC32 of data' do
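+ # 3984772369 (0xED82CD11) is the CRC-32 checksum of the string 'abcd'.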
+ expect(subject.crc32('abcd')).to eq 3984772369
end
+ end
+ describe ".hexdigest" do
it 'returns the SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
- expect(fake_class.hexdigest(__FILE__)).to eq(expected)
+ expect(subject.hexdigest(__FILE__)).to eq(expected)
end
end
end
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index f23865a5dbb..a19fbae3cfb 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -12,6 +12,36 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
let(:model) { CounterAttributeModel.find(project_statistics.id) }
end
+ describe 'after_flush callbacks' do
+ let(:attribute) { model.class.counter_attributes.first }
+
+ subject { model.flush_increments_to_database!(attribute) }
+
+ it 'has registered callbacks' do # the callback is registered by the :counter_attribute RSpec tag
+ expect(model.class.after_flush_callbacks.size).to eq(1)
+ end
+
+ context 'when there are increments to flush' do
+ before do
+ model.delayed_increment_counter(attribute, 10)
+ end
+
+ it 'executes the callbacks' do
+ subject
+
+ expect(model.flushed).to be_truthy
+ end
+ end
+
+ context 'when there are no increments to flush' do
+ it 'does not execute the callbacks' do
+ subject
+
+ expect(model.flushed).to be_nil
+ end
+ end
+ end
+
describe '.steal_increments' do
let(:increment_key) { 'counters:Model:123:attribute' }
let(:flushed_key) { 'counter:Model:123:attribute:flushed' }
diff --git a/spec/models/concerns/featurable_spec.rb b/spec/models/concerns/featurable_spec.rb
index 31186b5fc77..99acc563950 100644
--- a/spec/models/concerns/featurable_spec.rb
+++ b/spec/models/concerns/featurable_spec.rb
@@ -180,6 +180,6 @@ RSpec.describe Featurable do
def update_all_project_features(project, features, value)
project_feature_attributes = features.map { |f| ["#{f}_access_level", value] }.to_h
- project.project_feature.update(project_feature_attributes)
+ project.project_feature.update!(project_feature_attributes)
end
end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 44561e2e55a..ff5b270cf33 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe Issuable do
it 'returns nil when author is nil' do
issue.author_id = nil
- issue.save(validate: false)
+ issue.save!(validate: false)
expect(issue.author_name).to eq nil
end
@@ -361,13 +361,13 @@ RSpec.describe Issuable do
end
it 'returns true when a subcription exists and subscribed is true' do
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ issue.subscriptions.create!(user: user, project: project, subscribed: true)
expect(issue.subscribed?(user, project)).to be_truthy
end
it 'returns false when a subcription exists and subscribed is false' do
- issue.subscriptions.create(user: user, project: project, subscribed: false)
+ issue.subscriptions.create!(user: user, project: project, subscribed: false)
expect(issue.subscribed?(user, project)).to be_falsey
end
@@ -383,13 +383,13 @@ RSpec.describe Issuable do
end
it 'returns true when a subcription exists and subscribed is true' do
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ issue.subscriptions.create!(user: user, project: project, subscribed: true)
expect(issue.subscribed?(user, project)).to be_truthy
end
it 'returns false when a subcription exists and subscribed is false' do
- issue.subscriptions.create(user: user, project: project, subscribed: false)
+ issue.subscriptions.create!(user: user, project: project, subscribed: false)
expect(issue.subscribed?(user, project)).to be_falsey
end
@@ -437,7 +437,7 @@ RSpec.describe Issuable do
let(:labels) { create_list(:label, 2) }
before do
- issue.update(labels: [labels[1]])
+ issue.update!(labels: [labels[1]])
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(issue).and_return(builder)
end
@@ -456,7 +456,7 @@ RSpec.describe Issuable do
context 'total_time_spent is updated' do
before do
issue.spend_time(duration: 2, user_id: user.id, spent_at: Time.current)
- issue.save
+ issue.save!
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(issue).and_return(builder)
end
@@ -497,8 +497,8 @@ RSpec.describe Issuable do
let(:user2) { create(:user) }
before do
- merge_request.update(assignees: [user])
- merge_request.update(assignees: [user, user2])
+ merge_request.update!(assignees: [user])
+ merge_request.update!(assignees: [user, user2])
expect(Gitlab::HookData::IssuableBuilder)
.to receive(:new).with(merge_request).and_return(builder)
end
@@ -554,7 +554,7 @@ RSpec.describe Issuable do
before do
label_link = issue.label_links.find_by(label_id: second_label.id)
label_link.label_id = nil
- label_link.save(validate: false)
+ label_link.save!(validate: false)
end
it 'filters out bad labels' do
@@ -824,7 +824,7 @@ RSpec.describe Issuable do
where(:issuable_type, :supports_time_tracking) do
:issue | true
- :incident | false
+ :incident | true
:merge_request | true
end
@@ -926,58 +926,4 @@ RSpec.describe Issuable do
end
end
end
-
- describe '#update_severity' do
- let(:severity) { 'low' }
-
- subject(:update_severity) { issuable.update_severity(severity) }
-
- context 'when issuable not an incident' do
- %i(issue merge_request).each do |issuable_type|
- let(:issuable) { build_stubbed(issuable_type) }
-
- it { is_expected.to be_nil }
-
- it 'does not set severity' do
- expect { subject }.not_to change(IssuableSeverity, :count)
- end
- end
- end
-
- context 'when issuable is an incident' do
- let!(:issuable) { create(:incident) }
-
- context 'when issuable does not have issuable severity yet' do
- it 'creates new record' do
- expect { update_severity }.to change { IssuableSeverity.where(issue: issuable).count }.to(1)
- end
-
- it 'sets severity to specified value' do
- expect { update_severity }.to change { issuable.severity }.to('low')
- end
- end
-
- context 'when issuable has an issuable severity' do
- let!(:issuable_severity) { create(:issuable_severity, issue: issuable, severity: 'medium') }
-
- it 'does not create new record' do
- expect { update_severity }.not_to change(IssuableSeverity, :count)
- end
-
- it 'updates existing issuable severity' do
- expect { update_severity }.to change { issuable_severity.severity }.to(severity)
- end
- end
-
- context 'when severity value is unsupported' do
- let(:severity) { 'unsupported-severity' }
-
- it 'sets the severity to default value' do
- update_severity
-
- expect(issuable.issuable_severity.severity).to eq(IssuableSeverity::DEFAULT)
- end
- end
- end
- end
end
diff --git a/spec/models/concerns/mentionable_spec.rb b/spec/models/concerns/mentionable_spec.rb
index 758b5aa2ce4..516c0fd75bc 100644
--- a/spec/models/concerns/mentionable_spec.rb
+++ b/spec/models/concerns/mentionable_spec.rb
@@ -177,7 +177,7 @@ RSpec.describe Issue, "Mentionable" do
expect(SystemNoteService).not_to receive(:cross_reference)
- issue.update(description: 'New description')
+ issue.update!(description: 'New description')
issue.create_new_cross_references!
end
@@ -186,7 +186,7 @@ RSpec.describe Issue, "Mentionable" do
expect(SystemNoteService).to receive(:cross_reference).with(issues[1], any_args)
- issue.update(description: issues[1].to_reference)
+ issue.update!(description: issues[1].to_reference)
issue.create_new_cross_references!
end
@@ -196,7 +196,7 @@ RSpec.describe Issue, "Mentionable" do
expect(SystemNoteService).to receive(:cross_reference).with(issues[1], any_args)
- note.update(note: issues[1].to_reference)
+ note.update!(note: issues[1].to_reference)
note.create_new_cross_references!
end
end
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index f5b82e42ad4..c37582cb65d 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe Milestoneable do
it 'returns true with a milestone from the the parent of the issue project group' do
parent = create(:group)
- group.update(parent: parent)
+ group.update!(parent: parent)
milestone = create(:milestone, group: parent)
expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index 58cd054efd5..3b8fc465421 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Milestone, 'Milestoneish' do
with_them do
before do
- project.update(visibility_level: project_visibility_levels[visibility])
+ project.update!(visibility_level: project_visibility_levels[visibility])
end
it 'returns the proper participants' do
@@ -139,7 +139,7 @@ RSpec.describe Milestone, 'Milestoneish' do
with_them do
before do
- project.update(visibility_level: project_visibility_levels[visibility])
+ project.update!(visibility_level: project_visibility_levels[visibility])
end
it 'returns the proper participants' do
@@ -171,7 +171,7 @@ RSpec.describe Milestone, 'Milestoneish' do
context 'when project is private' do
before do
- project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
it 'does not return any merge request for a non member' do
@@ -195,7 +195,7 @@ RSpec.describe Milestone, 'Milestoneish' do
context 'when merge requests are available to project members' do
before do
- project.project_feature.update(merge_requests_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
end
it 'does not return any merge request for a non member' do
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index b12ad82920f..7e031bdd263 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
self.reactive_cache_lifetime = 5.minutes
self.reactive_cache_refresh_interval = 15.seconds
+ self.reactive_cache_work_type = :no_dependency
attr_reader :id
@@ -372,4 +373,14 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
it { expect(subject.reactive_cache_hard_limit).to be_nil }
it { expect(subject.reactive_cache_worker_finder).to respond_to(:call) }
end
+
+ describe 'classes including this concern' do
+ it 'sets reactive_cache_work_type' do
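+ # ObjectSpace.each_object(Class) enumerates every class loaded in the process;
+ # anonymous classes (whose name is nil) are filtered out.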
+ classes = ObjectSpace.each_object(Class).select do |klass|
+ klass < described_class && klass.name
+ end
+
+ expect(classes).to all(have_attributes(reactive_cache_work_type: be_in(described_class::WORK_TYPE.keys)))
+ end
+ end
end
diff --git a/spec/models/concerns/resolvable_discussion_spec.rb b/spec/models/concerns/resolvable_discussion_spec.rb
index c91ddfee944..c0e5ddc23b1 100644
--- a/spec/models/concerns/resolvable_discussion_spec.rb
+++ b/spec/models/concerns/resolvable_discussion_spec.rb
@@ -553,13 +553,13 @@ RSpec.describe Discussion, ResolvableDiscussion do
let(:time) { Time.current.utc }
before do
- Timecop.freeze(time - 1.second) do
+ travel_to(time - 1.second) do
first_note.resolve!(current_user)
end
- Timecop.freeze(time) do
+ travel_to(time) do
third_note.resolve!(current_user)
end
- Timecop.freeze(time + 1.second) do
+ travel_to(time + 1.second) do
second_note.resolve!(current_user)
end
end
diff --git a/spec/models/concerns/routable_spec.rb b/spec/models/concerns/routable_spec.rb
index 15d754861b2..e4cf68663ef 100644
--- a/spec/models/concerns/routable_spec.rb
+++ b/spec/models/concerns/routable_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Group, 'Routable' do
end
it 'updates route record on path change' do
- group.update(path: 'wow', name: 'much')
+ group.update!(path: 'wow', name: 'much')
expect(group.route.path).to eq('wow')
expect(group.route.name).to eq('much')
diff --git a/spec/models/concerns/schedulable_spec.rb b/spec/models/concerns/schedulable_spec.rb
index 875c2d80e55..62acd12e267 100644
--- a/spec/models/concerns/schedulable_spec.rb
+++ b/spec/models/concerns/schedulable_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Schedulable do
context 'for a pipeline_schedule' do
# let! is used to reset the next_run_at value before each spec
let(:object) do
- Timecop.freeze(1.day.ago) do
+ travel_to(1.day.ago) do
create(:ci_pipeline_schedule, :hourly)
end
end
diff --git a/spec/models/concerns/subscribable_spec.rb b/spec/models/concerns/subscribable_spec.rb
index 2a43e748e58..3e52ca5cf63 100644
--- a/spec/models/concerns/subscribable_spec.rb
+++ b/spec/models/concerns/subscribable_spec.rb
@@ -20,13 +20,13 @@ RSpec.describe Subscribable, 'Subscribable' do
end
it 'returns true when a subcription exists and subscribed is true' do
- resource.subscriptions.create(user: user_1, subscribed: true)
+ resource.subscriptions.create!(user: user_1, subscribed: true)
expect(resource.subscribed?(user_1)).to be_truthy
end
it 'returns false when a subcription exists and subscribed is false' do
- resource.subscriptions.create(user: user_1, subscribed: false)
+ resource.subscriptions.create!(user: user_1, subscribed: false)
expect(resource.subscribed?(user_1)).to be_falsey
end
@@ -38,13 +38,13 @@ RSpec.describe Subscribable, 'Subscribable' do
end
it 'returns true when a subcription exists and subscribed is true' do
- resource.subscriptions.create(user: user_1, project: project, subscribed: true)
+ resource.subscriptions.create!(user: user_1, project: project, subscribed: true)
expect(resource.subscribed?(user_1, project)).to be_truthy
end
it 'returns false when a subcription exists and subscribed is false' do
- resource.subscriptions.create(user: user_1, project: project, subscribed: false)
+ resource.subscriptions.create!(user: user_1, project: project, subscribed: false)
expect(resource.subscribed?(user_1, project)).to be_falsey
end
@@ -58,9 +58,9 @@ RSpec.describe Subscribable, 'Subscribable' do
it 'returns the subscribed users' do
user_2 = create(:user)
- resource.subscriptions.create(user: user_1, subscribed: true)
- resource.subscriptions.create(user: user_2, project: project, subscribed: true)
- resource.subscriptions.create(user: create(:user), project: project, subscribed: false)
+ resource.subscriptions.create!(user: user_1, subscribed: true)
+ resource.subscriptions.create!(user: user_2, project: project, subscribed: true)
+ resource.subscriptions.create!(user: create(:user), project: project, subscribed: false)
expect(resource.subscribers(project)).to contain_exactly(user_1, user_2)
end
@@ -113,7 +113,7 @@ RSpec.describe Subscribable, 'Subscribable' do
describe '#unsubscribe' do
context 'without project' do
it 'unsubscribes the given current user' do
- resource.subscriptions.create(user: user_1, subscribed: true)
+ resource.subscriptions.create!(user: user_1, subscribed: true)
expect(resource.subscribed?(user_1)).to be_truthy
resource.unsubscribe(user_1)
@@ -124,7 +124,7 @@ RSpec.describe Subscribable, 'Subscribable' do
context 'with project' do
it 'unsubscribes the given current user' do
- resource.subscriptions.create(user: user_1, project: project, subscribed: true)
+ resource.subscriptions.create!(user: user_1, project: project, subscribed: true)
expect(resource.subscribed?(user_1, project)).to be_truthy
resource.unsubscribe(user_1, project)
@@ -139,7 +139,7 @@ RSpec.describe Subscribable, 'Subscribable' do
context 'when desired_state is set to true' do
context 'when a user is subscribed to the resource' do
it 'keeps the user subscribed' do
- resource.subscriptions.create(user: user_1, subscribed: true, project: resource_project)
+ resource.subscriptions.create!(user: user_1, subscribed: true, project: resource_project)
resource.set_subscription(user_1, true, resource_project)
@@ -159,7 +159,7 @@ RSpec.describe Subscribable, 'Subscribable' do
context 'when desired_state is set to false' do
context 'when a user is subscribed to the resource' do
it 'unsubscribes the user from the resource' do
- resource.subscriptions.create(user: user_1, subscribed: true, project: resource_project)
+ resource.subscriptions.create!(user: user_1, subscribed: true, project: resource_project)
expect { resource.set_subscription(user_1, false, resource_project) }
.to change { resource.subscribed?(user_1, resource_project) }
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index e0e764fc63c..90e94b5dca9 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
subject { PersonalAccessToken.find_by_token(token_value) }
it 'finds the token' do
- personal_access_token.save
+ personal_access_token.save!
expect(subject).to eq(personal_access_token)
end
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index 953f92d103b..2a7aaed5204 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -184,6 +184,33 @@ RSpec.describe ContainerRepository do
end
end
+ describe '#start_expiration_policy!' do
+ subject { repository.start_expiration_policy! }
+
+ it 'sets expiration_policy_started_at to the current time' do
+ Timecop.freeze do
+ expect { subject }
+ .to change { repository.expiration_policy_started_at }.from(nil).to(Time.zone.now)
+ end
+ end
+ end
+
+ describe '#reset_expiration_policy_started_at!' do
+ subject { repository.reset_expiration_policy_started_at! }
+
+ before do
+ repository.start_expiration_policy!
+ end
+
+ it 'resets the expiration policy started at' do
+ started_at = repository.expiration_policy_started_at
+
+ expect(started_at).not_to be_nil
+ expect { subject }
+ .to change { repository.expiration_policy_started_at }.from(started_at).to(nil)
+ end
+ end
+
describe '.build_from_path' do
let(:registry_path) do
ContainerRegistry::Path.new(project.full_path + '/some/image')
diff --git a/spec/models/design_management/design_collection_spec.rb b/spec/models/design_management/design_collection_spec.rb
index 8575cc80b5b..bc8330c7dd3 100644
--- a/spec/models/design_management/design_collection_spec.rb
+++ b/spec/models/design_management/design_collection_spec.rb
@@ -101,6 +101,18 @@ RSpec.describe DesignManagement::DesignCollection do
end
end
+ describe "#empty?" do
+ it "is true when the design collection has no designs" do
+ expect(collection).to be_empty
+ end
+
+ it "is false when the design collection has designs" do
+ create(:design, issue: issue)
+
+ expect(collection).not_to be_empty
+ end
+ end
+
describe "#versions" do
it "includes versions for all designs" do
version_1 = create(:design_version)
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index d4adc0d42d0..2ce9f00a056 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -206,6 +206,15 @@ RSpec.describe DesignManagement::Design do
end
end
+ describe ".build_full_path" do
+ it "builds the full path for a design" do
+ design = build(:design, issue: issue, filename: "hello.jpg")
+ expected_path = "#{DesignManagement.designs_directory}/issue-#{design.issue.iid}/hello.jpg"
+
+ expect(described_class.build_full_path(issue, design)).to eq(expected_path)
+ end
+ end
+
describe '#visible_in?' do
let_it_be(:issue) { create(:issue, project: issue.project) }
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index bafcb7a3741..47492715c11 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -918,6 +918,56 @@ RSpec.describe Event do
expect(destroyed).to eq('deleted')
expect(archived).to eq('archived')
end
+
+ it 'handles push_action correctly' do
+ project = create(:project)
+ user = create(:user)
+ project.add_developer(user)
+ push_event = create_push_event(project, user)
+
+ expect(push_event.push_action?).to be true
+ expect(push_event.action_name).to eq('pushed to')
+ end
+
+ context 'when handling base actions' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:trait, :action_name) do
+ :created | 'created'
+ :updated | 'opened'
+ :closed | 'closed'
+ :reopened | 'opened'
+ :commented | 'commented on'
+ :merged | 'accepted'
+ :joined | 'joined'
+ :left | 'left'
+ :destroyed | 'destroyed'
+ :expired | 'removed due to membership expiration from'
+ :approved | 'approved'
+ end
+
+ with_them do
+ it 'returns the correct action name' do
+ event = build(:event, trait)
+
+ expect(event.action_name).to eq(action_name)
+ end
+ end
+ end
+
+ context 'for created_project_action?' do
+ it 'returns created for created event' do
+ action = build(:project_created_event)
+
+ expect(action.action_name).to eq('created')
+ end
+
+ it 'returns imported for imported event' do
+ action = build(:project_imported_event)
+
+ expect(action.action_name).to eq('imported')
+ end
+ end
end
def create_push_event(project, user)
diff --git a/spec/models/group_import_state_spec.rb b/spec/models/group_import_state_spec.rb
index 4404ef64966..469b5c96ac9 100644
--- a/spec/models/group_import_state_spec.rb
+++ b/spec/models/group_import_state_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe GroupImportState do
describe 'validations' do
let_it_be(:group) { create(:group) }
+ it { is_expected.to belong_to(:user).required }
it { is_expected.to validate_presence_of(:group) }
it { is_expected.to validate_presence_of(:status) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 15972f66fd6..ed027d02b5b 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -224,6 +224,20 @@ RSpec.describe Group do
end
end
+ describe '.without_integration' do
+ let(:another_group) { create(:group) }
+ let(:instance_integration) { build(:jira_service, :instance) }
+
+ before do
+ create(:jira_service, group: group, project: nil)
+ create(:slack_service, group: another_group, project: nil)
+ end
+
+ it 'returns groups without integration' do
+ expect(Group.without_integration(instance_integration)).to contain_exactly(another_group)
+ end
+ end
+
describe '.public_or_visible_to_user' do
let!(:private_group) { create(:group, :private) }
let!(:internal_group) { create(:group, :internal) }
@@ -1330,229 +1344,134 @@ RSpec.describe Group do
end
end
- describe '#shared_runners_allowed?' do
- using RSpec::Parameterized::TableSyntax
-
- where(:shared_runners_enabled, :allow_descendants_override, :expected_shared_runners_allowed) do
- true | false | true
- true | true | true
- false | false | false
- false | true | true
- end
-
- with_them do
- let!(:group) { create(:group, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override) }
-
- it 'returns the expected result' do
- expect(group.shared_runners_allowed?).to eq(expected_shared_runners_allowed)
- end
- end
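+ # Evaluates the memoized subject once, then reloads the given records so the
+ # change matchers below observe persisted values.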
+ def subject_and_reload(*models)
+ subject
+ models.map(&:reload)
end
- describe '#parent_allows_shared_runners?' do
- context 'when parent group is present' do
- using RSpec::Parameterized::TableSyntax
-
- where(:shared_runners_enabled, :allow_descendants_override, :expected_shared_runners_allowed) do
- true | false | true
- true | true | true
- false | false | false
- false | true | true
- end
-
- with_them do
- let!(:parent_group) { create(:group, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override) }
- let!(:group) { create(:group, parent: parent_group) }
-
- it 'returns the expected result' do
- expect(group.parent_allows_shared_runners?).to eq(expected_shared_runners_allowed)
+ describe '#update_shared_runners_setting!' do
+ context 'enabled' do
+ subject { group.update_shared_runners_setting!('enabled') }
+
+ context 'group whose ancestors have shared runners disabled' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
+
+ it 'raises error and does not enable shared Runners' do
+ expect { subject_and_reload(parent, group, project) }
+ .to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')
+ .and not_change { parent.shared_runners_enabled }
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
end
end
- end
-
- context 'when parent group is missing' do
- let!(:group) { create(:group) }
-
- it 'returns true' do
- expect(group.parent_allows_shared_runners?).to be_truthy
- end
- end
- end
-
- describe '#parent_enabled_shared_runners?' do
- subject { group.parent_enabled_shared_runners? }
-
- context 'when parent group is present' do
- context 'When shared Runners are disabled' do
- let!(:parent_group) { create(:group, :shared_runners_disabled) }
- let!(:group) { create(:group, parent: parent_group) }
-
- it { is_expected.to be_falsy }
- end
-
- context 'When shared Runners are enabled' do
- let!(:parent_group) { create(:group) }
- let!(:group) { create(:group, parent: parent_group) }
-
- it { is_expected.to be_truthy }
- end
- end
-
- context 'when parent group is missing' do
- let!(:group) { create(:group) }
-
- it { is_expected.to be_truthy }
- end
- end
-
- describe '#enable_shared_runners!' do
- subject { group.enable_shared_runners! }
- context 'group that its ancestors have shared runners disabled' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
+ context 'root group with shared runners disabled' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- it 'raises error and does not enable shared Runners' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Shared Runners disabled for the parent group')
- .and not_change { parent.reload.shared_runners_enabled }
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it 'enables shared Runners only for itself' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.shared_runners_enabled }.from(false).to(true)
+ .and not_change { sub_group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
end
end
- context 'root group with shared runners disabled' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
-
- it 'enables shared Runners only for itself' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(false).to(true)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ context 'disabled_and_unoverridable' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
+ let_it_be(:sub_group_2) { create(:group, parent: group) }
+ let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
+ let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
+
+ subject { group.update_shared_runners_setting!('disabled_and_unoverridable') }
+
+ it 'disables shared Runners for all descendant groups and projects' do
+ expect { subject_and_reload(group, sub_group, sub_group_2, project, project_2) }
+ .to change { group.shared_runners_enabled }.from(true).to(false)
+ .and not_change { group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { sub_group.shared_runners_enabled }
+ .and change { sub_group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
+ .and change { sub_group_2.shared_runners_enabled }.from(true).to(false)
+ .and not_change { sub_group_2.allow_descendants_override_disabled_shared_runners }
+ .and change { project.shared_runners_enabled }.from(true).to(false)
+ .and change { project_2.shared_runners_enabled }.from(true).to(false)
+ end
+
+ context 'with override on self' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+
+ it 'disables it' do
+ expect { subject_and_reload(group) }
+ .to not_change { group.shared_runners_enabled }
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
+ end
end
end
- end
-
- describe '#disable_shared_runners!' do
- let_it_be(:group) { create(:group) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
- let_it_be(:sub_group_2) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
- let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
-
- subject { group.disable_shared_runners! }
-
- it 'disables shared Runners for all descendant groups and projects' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and change { sub_group_2.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { sub_group_2.reload.allow_descendants_override_disabled_shared_runners }
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- .and change { project_2.reload.shared_runners_enabled }.from(true).to(false)
- end
- end
-
- describe '#allow_descendants_override_disabled_shared_runners!' do
- subject { group.allow_descendants_override_disabled_shared_runners! }
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
+ context 'disabled_with_override' do
+ subject { group.update_shared_runners_setting!('disabled_with_override') }
- it 'enables allow descendants to override only for itself' do
- expect { subject }
- .to change { group.reload.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
- end
- end
+ context 'top level group' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- context 'group that its ancestors have shared Runners disabled but allows to override' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
-
- it 'enables allow descendants to override' do
- expect { subject }
- .to not_change { parent.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { parent.reload.shared_runners_enabled }
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it 'enables allow descendants to override only for itself' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { sub_group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { sub_group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
end
- end
- context 'when parent does not allow' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+      context 'group whose ancestors have shared Runners disabled but allow override' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
- it 'raises error and does not allow descendants to override' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Group level shared Runners not allowed')
- .and not_change { parent.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { parent.reload.shared_runners_enabled }
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { group.reload.shared_runners_enabled }
+ it 'enables allow descendants to override' do
+ expect { subject_and_reload(parent, group, project) }
+ .to not_change { parent.allow_descendants_override_disabled_shared_runners }
+ .and not_change { parent.shared_runners_enabled }
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
end
- end
- context 'top level group that has shared Runners enabled' do
- let_it_be(:group) { create(:group, shared_runners_enabled: true) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
+ context 'when parent does not allow' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
- it 'raises error and does not change config' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Shared Runners enabled')
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it 'raises error and does not allow descendants to override' do
+ expect { subject_and_reload(parent, group) }
+ .to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
+ .and not_change { parent.allow_descendants_override_disabled_shared_runners }
+ .and not_change { parent.shared_runners_enabled }
+ .and not_change { group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { group.shared_runners_enabled }
+ end
end
- end
- end
- describe '#disallow_descendants_override_disabled_shared_runners!' do
- subject { group.disallow_descendants_override_disabled_shared_runners! }
+ context 'top level group that has shared Runners enabled' do
+ let_it_be(:group) { create(:group, shared_runners_enabled: true) }
+ let_it_be(:sub_group) { create(:group, shared_runners_enabled: true, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners ) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }
-
- it 'disables allow project to override for descendants and disables project shared Runners' do
- expect { subject }
- .to not_change { group.reload.shared_runners_enabled }
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and change { sub_group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- end
- end
-
- context 'top level group that has shared Runners enabled' do
- let_it_be(:group) { create(:group, shared_runners_enabled: true) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
-
- it 'results error and does not change config' do
- expect { subject }
- .to raise_error(described_class::UpdateSharedRunnersError, 'Shared Runners enabled')
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
+ it 'enables allow descendants to override & disables shared runners everywhere' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.shared_runners_enabled }.from(true).to(false)
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and change { sub_group.shared_runners_enabled }.from(true).to(false)
+ .and change { project.shared_runners_enabled }.from(true).to(false)
+ end
end
end
end
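
The `#update_shared_runners_setting!` examples above replace the per-action bang methods with a single entry point keyed by a setting string. A minimal sketch of the dispatch they exercise, under the assumption that plain `update!` calls plus model validations produce the `ActiveRecord::RecordInvalid` errors asserted in the specs (this is not GitLab's actual implementation, and the cascade to descendants is only hinted at in comments):

def update_shared_runners_setting!(state)
  case state
  when 'enabled'
    # Model validation raises ActiveRecord::RecordInvalid when the parent
    # group has shared runners disabled.
    update!(shared_runners_enabled: true)
  when 'disabled_with_override'
    update!(shared_runners_enabled: false,
            allow_descendants_override_disabled_shared_runners: true)
    # ...the specs also expect shared runners to be switched off on all
    # descendant groups and projects.
  when 'disabled_and_unoverridable'
    update!(shared_runners_enabled: false,
            allow_descendants_override_disabled_shared_runners: false)
    # ...plus the override flag cleared on descendant groups.
  else
    raise ArgumentError, "invalid shared runners setting: #{state}"
  end
end
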
diff --git a/spec/models/import_failure_spec.rb b/spec/models/import_failure_spec.rb
index cdef125e890..9fee1b0ae7b 100644
--- a/spec/models/import_failure_spec.rb
+++ b/spec/models/import_failure_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe ImportFailure do
it 'orders hard failures by newest first' do
older_failure = hard_failure.dup
- Timecop.freeze(1.day.before(hard_failure.created_at)) do
+ travel_to(1.day.before(hard_failure.created_at)) do
older_failure.save!
expect(ImportFailure.hard_failures_by_correlation_id(correlation_id)).to eq([hard_failure, older_failure])
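
This file (and several below) swaps `Timecop.freeze` for `travel_to`, which comes from Rails' built-in `ActiveSupport::Testing::TimeHelpers`; the spec suite presumably includes that module globally. A standalone illustration of the block form used here:

require 'active_support/all'
require 'active_support/testing/time_helpers'

include ActiveSupport::Testing::TimeHelpers

travel_to(1.day.ago) do
  # Inside the block Time.now, Time.current and Date.today all report the
  # travelled-to moment; the stubbing is undone automatically on exit.
  puts Time.current
end
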
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 87ba0f3f7e6..e4bb6522689 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -11,18 +11,18 @@ RSpec.describe Integration do
before do
create(:jira_service, project: project_1, inherit_from_id: instance_integration.id)
create(:jira_service, project: project_2, inherit_from_id: nil)
- create(:slack_service, project: project_1, inherit_from_id: nil)
+ create(:slack_service, project: project_3, inherit_from_id: nil)
end
- describe '#with_custom_integration_for' do
+ describe '.with_custom_integration_for' do
it 'returns projects with custom integrations' do
expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2)
end
end
- describe '#ids_without_integration' do
- it 'returns projects ids without an integration' do
- expect(Project.ids_without_integration(instance_integration, 100)).to contain_exactly(project_3.id)
+ describe '.without_integration' do
+ it 'returns projects without integration' do
+ expect(Project.without_integration(instance_integration)).to contain_exactly(project_3)
end
end
end
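
The scope rename from `.ids_without_integration` to `.without_integration` returns records instead of ids. One plausible shape for such a scope, assuming integrations are stored per project with a `type` column (table and class names here are assumptions, not necessarily GitLab's exact schema):

class Project < ApplicationRecord
  # Projects that have no integration of the same type as the given
  # instance-level integration.
  scope :without_integration, ->(integration) do
    where.not(id: Service.where(type: integration.type).select(:project_id))
  end
end
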
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 966e4321378..1d3c09a48b7 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Issue::Metrics do
context "milestones" do
it "records the first time an issue is associated with a milestone" do
time = Time.current
- Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -47,9 +47,9 @@ RSpec.describe Issue::Metrics do
it "does not record the second time an issue is associated with a milestone" do
time = Time.current
- Timecop.freeze(time) { subject.update(milestone: create(:milestone, project: project)) }
- Timecop.freeze(time + 2.hours) { subject.update(milestone: nil) }
- Timecop.freeze(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time + 2.hours) { subject.update(milestone: nil) }
+ travel_to(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -61,7 +61,7 @@ RSpec.describe Issue::Metrics do
it "records the first time an issue is associated with a list label" do
list_label = create(:list).label
time = Time.current
- Timecop.freeze(time) { subject.update(label_ids: [list_label.id]) }
+ travel_to(time) { subject.update(label_ids: [list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -71,9 +71,9 @@ RSpec.describe Issue::Metrics do
it "does not record the second time an issue is associated with a list label" do
time = Time.current
first_list_label = create(:list).label
- Timecop.freeze(time) { subject.update(label_ids: [first_list_label.id]) }
+ travel_to(time) { subject.update(label_ids: [first_list_label.id]) }
second_list_label = create(:list).label
- Timecop.freeze(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
+ travel_to(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
diff --git a/spec/models/issue_email_participant_spec.rb b/spec/models/issue_email_participant_spec.rb
new file mode 100644
index 00000000000..f19e65e31f3
--- /dev/null
+++ b/spec/models/issue_email_participant_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IssueEmailParticipant do
+ describe "Associations" do
+ it { is_expected.to belong_to(:issue) }
+ end
+
+ describe 'Validations' do
+ subject { build(:issue_email_participant) }
+
+ it { is_expected.to validate_presence_of(:issue) }
+ it { is_expected.to validate_presence_of(:email) }
+ it { is_expected.to validate_uniqueness_of(:email).scoped_to([:issue_id]) }
+
+ it_behaves_like 'an object with RFC3696 compliant email-formated attributes', :email
+ end
+end
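
For reference, a model shaped the way this new spec implies would look roughly like the sketch below; the RFC3696 e-mail format validation is assumed to come from a shared validator elsewhere in the codebase.

class IssueEmailParticipant < ApplicationRecord
  belongs_to :issue

  validates :issue, presence: true
  validates :email, presence: true, uniqueness: { scope: [:issue_id] }
  # E-mail format validation (RFC3696) is expected to be added by a shared
  # concern/validator, matching the shared example used in the spec.
end
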
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 283d945157b..16ea2989eda 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -28,10 +28,11 @@ RSpec.describe Issue do
it { is_expected.to have_and_belong_to_many(:prometheus_alert_events) }
it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:prometheus_alerts) }
+ it { is_expected.to have_many(:issue_email_participants) }
describe 'versions.most_recent' do
it 'returns the most recent version' do
- issue = create(:issue)
+ issue = create(:issue, project: reusable_project)
create_list(:design_version, 2, issue: issue)
last_version = create(:design_version, issue: issue)
@@ -79,19 +80,19 @@ RSpec.describe Issue do
end
end
- subject { create(:issue) }
+ subject { create(:issue, project: reusable_project) }
describe 'callbacks' do
describe '#ensure_metrics' do
it 'creates metrics after saving' do
- issue = create(:issue)
+ issue = create(:issue, project: reusable_project)
expect(issue.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
end
it 'does not create duplicate metrics for an issue' do
- issue = create(:issue)
+ issue = create(:issue, project: reusable_project)
issue.close!
@@ -102,6 +103,14 @@ RSpec.describe Issue do
it 'records current metrics' do
expect_any_instance_of(Issue::Metrics).to receive(:record!)
+ create(:issue, project: reusable_project)
+ end
+ end
+
+ describe '#record_create_action' do
+ it 'records the creation action after saving' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
+
create(:issue)
end
end
@@ -111,8 +120,8 @@ RSpec.describe Issue do
subject { described_class.with_alert_management_alerts }
it 'gets only issues with alerts' do
- alert = create(:alert_management_alert, issue: create(:issue))
- issue = create(:issue)
+ alert = create(:alert_management_alert, project: reusable_project, issue: create(:issue, project: reusable_project))
+ issue = create(:issue, project: reusable_project)
expect(subject).to contain_exactly(alert.issue)
expect(subject).not_to include(issue)
@@ -130,10 +139,9 @@ RSpec.describe Issue do
end
describe '.with_issue_type' do
- let_it_be(:project) { create(:project) }
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:incident) { create(:incident, project: project) }
- let_it_be(:test_case) { create(:quality_test_case, project: project) }
+ let_it_be(:issue) { create(:issue, project: reusable_project) }
+ let_it_be(:incident) { create(:incident, project: reusable_project) }
+ let_it_be(:test_case) { create(:quality_test_case, project: reusable_project) }
it 'gives issues with the given issue type' do
expect(described_class.with_issue_type('issue'))
@@ -146,6 +154,24 @@ RSpec.describe Issue do
end
end
+ describe '.order_severity' do
+ let_it_be(:issue_high_severity) { create(:issuable_severity, severity: :high).issue }
+ let_it_be(:issue_low_severity) { create(:issuable_severity, severity: :low).issue }
+ let_it_be(:issue_no_severity) { create(:incident) }
+
+ context 'sorting ascending' do
+ subject { described_class.order_severity_asc }
+
+ it { is_expected.to eq([issue_no_severity, issue_low_severity, issue_high_severity]) }
+ end
+
+ context 'sorting descending' do
+ subject { described_class.order_severity_desc }
+
+ it { is_expected.to eq([issue_high_severity, issue_low_severity, issue_no_severity]) }
+ end
+ end
+
describe '#order_by_position_and_priority' do
let(:project) { reusable_project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
@@ -195,7 +221,7 @@ RSpec.describe Issue do
end
describe '#close' do
- subject(:issue) { create(:issue, state: 'opened') }
+ subject(:issue) { create(:issue, project: reusable_project, state: 'opened') }
it 'sets closed_at to Time.current when an issue is closed' do
expect { issue.close }.to change { issue.closed_at }.from(nil)
@@ -210,7 +236,7 @@ RSpec.describe Issue do
end
describe '#reopen' do
- let(:issue) { create(:issue, state: 'closed', closed_at: Time.current, closed_by: user) }
+ let(:issue) { create(:issue, project: reusable_project, state: 'closed', closed_at: Time.current, closed_by: user) }
     it 'sets closed_at to nil when an issue is reopened' do
expect { issue.reopen }.to change { issue.closed_at }.to(nil)
@@ -293,7 +319,7 @@ RSpec.describe Issue do
end
describe '#assignee_or_author?' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
it 'returns true for a user that is assigned to an issue' do
issue.assignees << user
@@ -313,22 +339,21 @@ RSpec.describe Issue do
end
describe '#related_issues' do
- let(:user) { create(:user) }
- let(:authorized_project) { create(:project) }
- let(:authorized_project2) { create(:project) }
- let(:unauthorized_project) { create(:project) }
+ let_it_be(:authorized_project) { create(:project) }
+ let_it_be(:authorized_project2) { create(:project) }
+ let_it_be(:unauthorized_project) { create(:project) }
- let(:authorized_issue_a) { create(:issue, project: authorized_project) }
- let(:authorized_issue_b) { create(:issue, project: authorized_project) }
- let(:authorized_issue_c) { create(:issue, project: authorized_project2) }
+ let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
+ let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
+ let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
- let(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
+ let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
- let!(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
- let!(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
- let!(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
+ let_it_be(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
+ let_it_be(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
+ let_it_be(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
- before do
+ before_all do
authorized_project.add_developer(user)
authorized_project2.add_developer(user)
end
@@ -366,17 +391,16 @@ RSpec.describe Issue do
end
context 'user is reporter in project issue belongs to' do
- let(:project) { create(:project) }
- let(:issue) { create(:issue, project: project) }
+ let(:issue) { create(:issue, project: reusable_project) }
- before do
- project.add_reporter(user)
+ before_all do
+ reusable_project.add_reporter(user)
end
it { is_expected.to eq true }
context 'issue not persisted' do
- let(:issue) { build(:issue, project: project) }
+ let(:issue) { build(:issue, project: reusable_project) }
it { is_expected.to eq false }
end
@@ -384,7 +408,7 @@ RSpec.describe Issue do
context 'checking destination project also' do
subject { issue.can_move?(user, to_project) }
- let(:to_project) { create(:project) }
+ let_it_be(:to_project) { create(:project) }
context 'destination project allowed' do
before do
@@ -420,7 +444,7 @@ RSpec.describe Issue do
end
describe '#duplicated?' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
subject { issue.duplicated? }
@@ -429,7 +453,7 @@ RSpec.describe Issue do
end
context 'issue already duplicated' do
- let(:duplicated_to_issue) { create(:issue) }
+ let(:duplicated_to_issue) { create(:issue, project: reusable_project) }
let(:issue) { create(:issue, duplicated_to: duplicated_to_issue) }
it { is_expected.to eq true }
@@ -440,13 +464,13 @@ RSpec.describe Issue do
subject { issue.from_service_desk? }
context 'when issue author is support bot' do
- let(:issue) { create(:issue, author: ::User.support_bot) }
+ let(:issue) { create(:issue, project: reusable_project, author: ::User.support_bot) }
it { is_expected.to be_truthy }
end
context 'when issue author is not support bot' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
it { is_expected.to be_falsey }
end
@@ -495,7 +519,7 @@ RSpec.describe Issue do
end
describe '#has_related_branch?' do
- let(:issue) { create(:issue, title: "Blue Bell Knoll") }
+ let(:issue) { create(:issue, project: reusable_project, title: "Blue Bell Knoll") }
subject { issue.has_related_branch? }
@@ -528,7 +552,7 @@ RSpec.describe Issue do
end
describe "#to_branch_name" do
- let(:issue) { create(:issue, title: 'testing-issue') }
+ let_it_be(:issue) { create(:issue, project: reusable_project, title: 'testing-issue') }
it 'starts with the issue iid' do
expect(issue.to_branch_name).to match(/\A#{issue.iid}-[A-Za-z\-]+\z/)
@@ -539,12 +563,12 @@ RSpec.describe Issue do
end
it "does not contain the issue title if confidential" do
- issue = create(:issue, title: 'testing-issue', confidential: true)
+ issue = create(:issue, project: reusable_project, title: 'testing-issue', confidential: true)
expect(issue.to_branch_name).to match(/confidential-issue\z/)
end
context 'issue title longer than 100 characters' do
- let(:issue) { create(:issue, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
+ let_it_be(:issue) { create(:issue, project: reusable_project, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
it "truncates branch name to at most 100 characters" do
expect(issue.to_branch_name.length).to be <= 100
@@ -581,15 +605,14 @@ RSpec.describe Issue do
describe '#participants' do
context 'using a public project' do
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:issue) { create(:issue, project: reusable_project) }
let!(:note1) do
- create(:note_on_issue, noteable: issue, project: project, note: 'a')
+ create(:note_on_issue, noteable: issue, project: reusable_project, note: 'a')
end
let!(:note2) do
- create(:note_on_issue, noteable: issue, project: project, note: 'b')
+ create(:note_on_issue, noteable: issue, project: reusable_project, note: 'b')
end
it 'includes the issue author' do
@@ -604,8 +627,8 @@ RSpec.describe Issue do
context 'using a private project' do
it 'does not include mentioned users that do not have access to the project' do
project = create(:project)
- user = create(:user)
issue = create(:issue, project: project)
+ user = create(:user)
create(:note_on_issue,
noteable: issue,
@@ -621,10 +644,9 @@ RSpec.describe Issue do
it 'updates when assignees change' do
user1 = create(:user)
user2 = create(:user)
- project = create(:project)
- issue = create(:issue, assignees: [user1], project: project)
- project.add_developer(user1)
- project.add_developer(user2)
+ issue = create(:issue, assignees: [user1], project: reusable_project)
+ reusable_project.add_developer(user1)
+ reusable_project.add_developer(user2)
expect(user1.assigned_open_issues_count).to eq(1)
expect(user2.assigned_open_issues_count).to eq(0)
@@ -638,9 +660,8 @@ RSpec.describe Issue do
end
describe '#visible_to_user?' do
- let(:project) { build(:project) }
+ let(:project) { reusable_project }
let(:issue) { build(:issue, project: project) }
- let(:user) { create(:user) }
subject { issue.visible_to_user?(user) }
@@ -661,6 +682,10 @@ RSpec.describe Issue do
context 'without a user' do
let(:user) { nil }
+ before do
+ project.project_feature.update_attribute(:issues_access_level, ProjectFeature::PUBLIC)
+ end
+
it 'returns true when the issue is publicly visible' do
expect(issue).to receive(:publicly_visible?).and_return(true)
@@ -995,7 +1020,8 @@ RSpec.describe Issue do
with_them do
it 'checks for spam on issues that can be seen anonymously' do
- project = create(:project, visibility_level: visibility_level)
+ project = reusable_project
+ project.update(visibility_level: visibility_level)
issue = create(:issue, project: project, confidential: confidential, description: 'original description')
issue.assign_attributes(new_attributes)
@@ -1016,8 +1042,8 @@ RSpec.describe Issue do
describe '.public_only' do
it 'only returns public issues' do
- public_issue = create(:issue)
- create(:issue, confidential: true)
+ public_issue = create(:issue, project: reusable_project)
+ create(:issue, project: reusable_project, confidential: true)
expect(described_class.public_only).to eq([public_issue])
end
@@ -1025,15 +1051,15 @@ RSpec.describe Issue do
describe '.confidential_only' do
it 'only returns confidential_only issues' do
- create(:issue)
- confidential_issue = create(:issue, confidential: true)
+ create(:issue, project: reusable_project)
+ confidential_issue = create(:issue, project: reusable_project, confidential: true)
expect(described_class.confidential_only).to eq([confidential_issue])
end
end
describe '.by_project_id_and_iid' do
- let_it_be(:issue_a) { create(:issue) }
+ let_it_be(:issue_a) { create(:issue, project: reusable_project) }
let_it_be(:issue_b) { create(:issue, iid: issue_a.iid) }
let_it_be(:issue_c) { create(:issue, project: issue_a.project) }
let_it_be(:issue_d) { create(:issue, project: issue_a.project) }
@@ -1050,8 +1076,8 @@ RSpec.describe Issue do
describe '.service_desk' do
it 'returns the service desk issue' do
- service_desk_issue = create(:issue, author: ::User.support_bot)
- regular_issue = create(:issue)
+ service_desk_issue = create(:issue, project: reusable_project, author: ::User.support_bot)
+ regular_issue = create(:issue, project: reusable_project)
expect(described_class.service_desk).to include(service_desk_issue)
expect(described_class.service_desk).not_to include(regular_issue)
@@ -1064,7 +1090,7 @@ RSpec.describe Issue do
describe "#labels_hook_attrs" do
let(:label) { create(:label) }
- let(:issue) { create(:labeled_issue, labels: [label]) }
+ let(:issue) { create(:labeled_issue, project: reusable_project, labels: [label]) }
it "returns a list of label hook attributes" do
expect(issue.labels_hook_attrs).to eq([label.hook_attrs])
@@ -1073,7 +1099,7 @@ RSpec.describe Issue do
context "relative positioning" do
it_behaves_like "a class that supports relative positioning" do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { reusable_project }
let(:factory) { :issue }
let(:default_params) { { project: project } }
end
@@ -1083,7 +1109,7 @@ RSpec.describe Issue do
describe "#previous_updated_at" do
let_it_be(:updated_at) { Time.zone.local(2012, 01, 06) }
- let_it_be(:issue) { create(:issue, updated_at: updated_at) }
+ let_it_be(:issue) { create(:issue, project: reusable_project, updated_at: updated_at) }
it 'returns updated_at value if updated_at did not change at all' do
allow(issue).to receive(:previous_changes).and_return({})
@@ -1121,7 +1147,7 @@ RSpec.describe Issue do
end
describe 'current designs' do
- let(:issue) { create(:issue) }
+ let(:issue) { create(:issue, project: reusable_project) }
subject { issue.designs.current }
@@ -1213,4 +1239,12 @@ RSpec.describe Issue do
expect(issue.allows_reviewers?).to be(false)
end
end
+
+ describe '#issue_type_supports?' do
+ let_it_be(:issue) { create(:issue) }
+
+ it 'raises error when feature is invalid' do
+ expect { issue.issue_type_supports?(:unkown_feature) }.to raise_error(ArgumentError)
+ end
+ end
end
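
The new `.order_severity` examples pin down NULL handling: issues without an issuable severity record sort lowest in both directions. A plausible pair of scopes consistent with that ordering (the association and table names are assumptions):

class Issue < ApplicationRecord
  has_one :issuable_severity

  scope :order_severity_asc, -> do
    left_joins(:issuable_severity)
      .order(Arel.sql('issuable_severities.severity ASC NULLS FIRST'))
  end

  scope :order_severity_desc, -> do
    left_joins(:issuable_severity)
      .order(Arel.sql('issuable_severities.severity DESC NULLS LAST'))
  end
end
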
diff --git a/spec/models/iteration_spec.rb b/spec/models/iteration_spec.rb
index 19a1625aad3..e7ec5de0ef1 100644
--- a/spec/models/iteration_spec.rb
+++ b/spec/models/iteration_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe Iteration do
let(:start_date) { 5.days.from_now }
let(:due_date) { 6.days.from_now }
- shared_examples_for 'overlapping dates' do
+ shared_examples_for 'overlapping dates' do |skip_constraint_test: false|
context 'when start_date is in range' do
let(:start_date) { 5.days.from_now }
let(:due_date) { 3.weeks.from_now }
@@ -129,9 +129,11 @@ RSpec.describe Iteration do
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
- it 'is not valid even if forced' do
- subject.validate # to generate iid/etc
- expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ unless skip_constraint_test
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
@@ -144,9 +146,11 @@ RSpec.describe Iteration do
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
- it 'is not valid even if forced' do
- subject.validate # to generate iid/etc
- expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ unless skip_constraint_test
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
@@ -156,9 +160,11 @@ RSpec.describe Iteration do
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
- it 'is not valid even if forced' do
- subject.validate # to generate iid/etc
- expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ unless skip_constraint_test
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
end
@@ -177,6 +183,14 @@ RSpec.describe Iteration do
expect { subject.save! }.not_to raise_exception
end
end
+
+ context 'sub-group' do
+ let(:subgroup) { create(:group, parent: group) }
+
+ subject { build(:iteration, group: subgroup, start_date: start_date, due_date: due_date) }
+
+ it_behaves_like 'overlapping dates', skip_constraint_test: true
+ end
end
context 'project' do
@@ -210,6 +224,17 @@ RSpec.describe Iteration do
end
end
end
+
+ context 'project in a group' do
+ let_it_be(:project) { create(:project, group: create(:group)) }
+ let_it_be(:existing_iteration) { create(:iteration, :skip_project_validation, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
+
+ subject { build(:iteration, :skip_project_validation, project: project, start_date: start_date, due_date: due_date) }
+
+ it_behaves_like 'overlapping dates' do
+ let(:constraint_name) { 'iteration_start_and_due_daterange_project_id_constraint' }
+ end
+ end
end
end
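
The `skip_constraint_test:` keyword added to the shared example is plain RSpec: shared example groups accept block arguments supplied by `it_behaves_like`, which lets the sub-group context keep the validation assertions while opting out of the database-constraint one. In isolation the pattern looks like this:

RSpec.shared_examples 'overlapping dates' do |skip_constraint_test: false|
  it 'is not valid' do
    expect(subject).not_to be_valid
  end

  unless skip_constraint_test
    it 'is not valid even if forced' do
      subject.validate # to generate iid/etc
      expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid)
    end
  end
end

# Callers pass the keyword through it_behaves_like:
it_behaves_like 'overlapping dates', skip_constraint_test: true
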
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 90950d93db4..118b1492cd6 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -212,6 +212,16 @@ RSpec.describe Member do
it { expect(described_class.non_request).to include @accepted_request_member }
end
+ describe '.not_accepted_invitations' do
+ let_it_be(:not_accepted_invitation) { create(:project_member, :invited) }
+ let_it_be(:accepted_invitation) { create(:project_member, :invited, invite_accepted_at: Date.today) }
+
+ subject { described_class.not_accepted_invitations }
+
+ it { is_expected.to include(not_accepted_invitation) }
+ it { is_expected.not_to include(accepted_invitation) }
+ end
+
describe '.not_accepted_invitations_by_user' do
let(:invited_by_user) { create(:project_member, :invited, project: project, created_by: @owner_user) }
@@ -225,6 +235,33 @@ RSpec.describe Member do
it { is_expected.to contain_exactly(invited_by_user) }
end
+ describe '.not_expired' do
+ let_it_be(:expiring_yesterday) { create(:group_member, expires_at: 1.day.from_now) }
+ let_it_be(:expiring_today) { create(:group_member, expires_at: 2.days.from_now) }
+ let_it_be(:expiring_tomorrow) { create(:group_member, expires_at: 3.days.from_now) }
+ let_it_be(:not_expiring) { create(:group_member) }
+
+ subject { described_class.not_expired }
+
+ around do |example|
+ travel_to(2.days.from_now) { example.run }
+ end
+
+ it { is_expected.not_to include(expiring_yesterday, expiring_today) }
+ it { is_expected.to include(expiring_tomorrow, not_expiring) }
+ end
+
+ describe '.last_ten_days_excluding_today' do
+ let_it_be(:created_today) { create(:group_member, created_at: Date.today.beginning_of_day) }
+ let_it_be(:created_yesterday) { create(:group_member, created_at: 1.day.ago) }
+ let_it_be(:created_eleven_days_ago) { create(:group_member, created_at: 11.days.ago) }
+
+ subject { described_class.last_ten_days_excluding_today }
+
+ it { is_expected.to include(created_yesterday) }
+ it { is_expected.not_to include(created_today, created_eleven_days_ago) }
+ end
+
describe '.search_invite_email' do
     it 'returns only members with the matching e-mail' do
create(:group_member, :invited)
@@ -683,6 +720,45 @@ RSpec.describe Member do
end
end
+ describe '#send_invitation_reminder' do
+ subject { member.send_invitation_reminder(0) }
+
+ context 'an invited group member' do
+ let!(:member) { create(:group_member, :invited) }
+
+ it 'sends a reminder' do
+ expect_any_instance_of(NotificationService).to receive(:invite_member_reminder).with(member, member.raw_invite_token, 0)
+
+ subject
+ end
+ end
+
+ context 'an invited member without a raw invite token set' do
+ let!(:member) { create(:group_member, :invited) }
+
+ before do
+ member.instance_variable_set(:@raw_invite_token, nil)
+ allow_any_instance_of(NotificationService).to receive(:invite_member_reminder)
+ end
+
+ it 'generates a new token' do
+ expect(member).to receive(:generate_invite_token!)
+
+ subject
+ end
+ end
+
+ context 'an uninvited member' do
+ let!(:member) { create(:group_member) }
+
+ it 'does not send a reminder' do
+ expect_any_instance_of(NotificationService).not_to receive(:invite_member_reminder)
+
+ subject
+ end
+ end
+ end
+
describe "#invite_to_unknown_user?" do
subject { member.invite_to_unknown_user? }
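
Plausible definitions for the two new scopes asserted above, treating a NULL `expires_at` as not expired and bounding `created_at` to the ten days before today (the exact SQL is an assumption, not GitLab's code):

class Member < ApplicationRecord
  scope :not_expired, -> do
    where('expires_at IS NULL OR expires_at > ?', Date.current)
  end

  # Exclusive range: includes records from ten days ago up to, but not
  # including, the start of today.
  scope :last_ten_days_excluding_today, -> do
    where(created_at: 10.days.ago.beginning_of_day...Date.today.beginning_of_day)
  end
end
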
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index 2c64201e84d..4ce2b5f0f48 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -180,6 +180,17 @@ RSpec.describe MergeRequestDiff do
expect(diff.external_diff_store).to eq(file_store)
end
+ it 'migrates a nil diff file' do
+ expect(diff).not_to be_stored_externally
+ MergeRequestDiffFile.where(merge_request_diff_id: diff.id).update_all(diff: nil)
+
+ stub_external_diffs_setting(enabled: true)
+
+ diff.migrate_files_to_external_storage!
+
+ expect(diff).to be_stored_externally
+ end
+
it 'safely handles a transaction error when migrating to external storage' do
expect(diff).not_to be_stored_externally
expect(diff.external_diff).not_to be_exists
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 98f709a0610..5fde9b8661d 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2358,48 +2358,43 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- context 'when state event tracking is disabled' do
+ context 'when no metrics or merge event exists' do
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request, :merged) }
+
before do
- stub_feature_flags(track_resource_state_change_events: false)
+ merge_request.metrics.destroy!
end
- context 'when merging note is persisted, but no metrics or merge event exists' do
- let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request, :merged) }
-
+ context 'when resource event for the merge exists' do
before do
- merge_request.metrics.destroy!
-
SystemNoteService.change_status(merge_request,
merge_request.target_project,
user,
merge_request.state, nil)
end
- it 'returns merging note creation date' do
+ it 'returns the resource event creation date' do
expect(merge_request.reload.metrics).to be_nil
expect(merge_request.merge_event).to be_nil
- expect(merge_request.notes.count).to eq(1)
- expect(merge_request.merged_at).to eq(merge_request.notes.first.created_at)
+ expect(merge_request.resource_state_events.count).to eq(1)
+ expect(merge_request.merged_at).to eq(merge_request.resource_state_events.first.created_at)
end
end
- end
-
- context 'when state event tracking is enabled' do
- let(:user) { create(:user) }
- let(:merge_request) { create(:merge_request, :merged) }
- before do
- merge_request.metrics.destroy!
-
- SystemNoteService.change_status(merge_request,
- merge_request.target_project,
- user,
- merge_request.state, nil)
- end
+ context 'when system note for the merge exists' do
+ before do
+ # We do not create these system notes anymore but we need this to work for existing MRs
+ # that used system notes instead of resource state events
+ create(:note, :system, noteable: merge_request, note: 'merged')
+ end
- it 'does not create a system note' do
- expect(merge_request.notes).to be_empty
+ it 'returns the merging note creation date' do
+ expect(merge_request.reload.metrics).to be_nil
+ expect(merge_request.merge_event).to be_nil
+ expect(merge_request.notes.count).to eq(1)
+ expect(merge_request.merged_at).to eq(merge_request.notes.first.created_at)
+ end
end
end
end
@@ -4261,24 +4256,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '#allows_reviewers?' do
- it 'returns false without merge_request_reviewers feature' do
- stub_feature_flags(merge_request_reviewers: false)
-
- merge_request = build_stubbed(:merge_request)
-
- expect(merge_request.allows_reviewers?).to be(false)
- end
-
- it 'returns true with merge_request_reviewers feature' do
- stub_feature_flags(merge_request_reviewers: true)
-
- merge_request = build_stubbed(:merge_request)
-
- expect(merge_request.allows_reviewers?).to be(true)
- end
- end
-
describe '#merge_ref_head' do
let(:merge_request) { create(:merge_request) }
@@ -4304,4 +4281,36 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
end
+
+ describe '#allows_reviewers?' do
+ it 'returns false without merge_request_reviewers feature' do
+ stub_feature_flags(merge_request_reviewers: false)
+
+ merge_request = build_stubbed(:merge_request)
+
+ expect(merge_request.allows_reviewers?).to be(false)
+ end
+
+ it 'returns true with merge_request_reviewers feature' do
+ stub_feature_flags(merge_request_reviewers: true)
+
+ merge_request = build_stubbed(:merge_request)
+
+ expect(merge_request.allows_reviewers?).to be(true)
+ end
+ end
+
+ describe '#update_and_mark_in_progress_merge_commit_sha' do
+ let(:ref) { subject.target_project.repository.commit.id }
+
+ before do
+ expect(subject.target_project).to receive(:mark_primary_write_location)
+ end
+
+ it 'updates commit ID' do
+ expect { subject.update_and_mark_in_progress_merge_commit_sha(ref) }
+ .to change { subject.in_progress_merge_commit_sha }
+ .from(nil).to(ref)
+ end
+ end
end
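
The restructured `merged_at` contexts describe a fallback chain for merged MRs whose metrics row is gone: the resource state event is preferred, and a legacy 'merged' system note is only consulted for old records. A simplified sketch of that chain (the scope and column names used for the last two steps are assumptions):

def merged_at
  metrics&.merged_at ||
    merge_event&.created_at ||
    resource_state_events.find_by(state: :merged)&.created_at ||
    notes.system.find_by(note: 'merged')&.created_at
end
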
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index ca1f06370d4..2e607639e1b 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -1320,4 +1320,140 @@ RSpec.describe Namespace do
end
end
end
+
+ describe '#shared_runners_setting' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :shared_runners_setting) do
+ true | true | 'enabled'
+ true | false | 'enabled'
+ false | true | 'disabled_with_override'
+ false | false | 'disabled_and_unoverridable'
+ end
+
+ with_them do
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+
+ it 'returns the result' do
+ expect(namespace.shared_runners_setting).to eq(shared_runners_setting)
+ end
+ end
+ end
+
+ describe '#shared_runners_setting_higher_than?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :other_setting, :result) do
+ true | true | 'enabled' | false
+ true | true | 'disabled_with_override' | true
+ true | true | 'disabled_and_unoverridable' | true
+ false | true | 'enabled' | false
+ false | true | 'disabled_with_override' | false
+ false | true | 'disabled_and_unoverridable' | true
+ false | false | 'enabled' | false
+ false | false | 'disabled_with_override' | false
+ false | false | 'disabled_and_unoverridable' | false
+ end
+
+ with_them do
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+
+ it 'returns the result' do
+ expect(namespace.shared_runners_setting_higher_than?(other_setting)).to eq(result)
+ end
+ end
+ end
+
+ describe 'validation #changing_shared_runners_enabled_is_allowed' do
+ context 'without a parent' do
+ let(:namespace) { build(:namespace, shared_runners_enabled: true) }
+
+ it 'is valid' do
+ expect(namespace).to be_valid
+ end
+ end
+
+ context 'with a parent' do
+ context 'when parent has shared runners disabled' do
+ let(:parent) { create(:namespace, :shared_runners_disabled) }
+ let(:sub_namespace) { build(:namespace, shared_runners_enabled: true, parent_id: parent.id) }
+
+ it 'is invalid' do
+ expect(sub_namespace).to be_invalid
+ expect(sub_namespace.errors[:shared_runners_enabled]).to include('cannot be enabled because parent group has shared Runners disabled')
+ end
+ end
+
+ context 'when parent has shared runners disabled but allows override' do
+ let(:parent) { create(:namespace, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let(:sub_namespace) { build(:namespace, shared_runners_enabled: true, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+
+ context 'when parent has shared runners enabled' do
+ let(:parent) { create(:namespace, shared_runners_enabled: true) }
+ let(:sub_namespace) { build(:namespace, shared_runners_enabled: true, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+ end
+ end
+
+ describe 'validation #changing_allow_descendants_override_disabled_shared_runners_is_allowed' do
+ context 'without a parent' do
+ context 'with shared runners disabled' do
+ let(:namespace) { build(:namespace, :allow_descendants_override_disabled_shared_runners, :shared_runners_disabled) }
+
+ it 'is valid' do
+ expect(namespace).to be_valid
+ end
+ end
+
+ context 'with shared runners enabled' do
+ let(:namespace) { create(:namespace) }
+
+ it 'is invalid' do
+ namespace.allow_descendants_override_disabled_shared_runners = true
+
+ expect(namespace).to be_invalid
+ expect(namespace.errors[:allow_descendants_override_disabled_shared_runners]).to include('cannot be changed if shared runners are enabled')
+ end
+ end
+ end
+
+ context 'with a parent' do
+ context 'when parent does not allow shared runners' do
+ let(:parent) { create(:namespace, :shared_runners_disabled) }
+ let(:sub_namespace) { build(:namespace, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent_id: parent.id) }
+
+ it 'is invalid' do
+ expect(sub_namespace).to be_invalid
+ expect(sub_namespace.errors[:allow_descendants_override_disabled_shared_runners]).to include('cannot be enabled because parent group does not allow it')
+ end
+ end
+
+ context 'when parent allows shared runners and setting to true' do
+ let(:parent) { create(:namespace, shared_runners_enabled: true) }
+ let(:sub_namespace) { build(:namespace, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+
+ context 'when parent allows shared runners and setting to false' do
+ let(:parent) { create(:namespace, shared_runners_enabled: true) }
+ let(:sub_namespace) { build(:namespace, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent_id: parent.id) }
+
+ it 'is valid' do
+ expect(sub_namespace).to be_valid
+ end
+ end
+ end
+ end
end
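
The two parameterized tables above fully determine both helpers, so they can be read as a spec for roughly the following methods (a sketch; the real model likely keeps the three setting strings in a constant and may name things differently):

SHARED_RUNNERS_SETTINGS = %w[
  disabled_and_unoverridable
  disabled_with_override
  enabled
].freeze

def shared_runners_setting
  if shared_runners_enabled
    'enabled'
  elsif allow_descendants_override_disabled_shared_runners
    'disabled_with_override'
  else
    'disabled_and_unoverridable'
  end
end

# "Higher" follows the order in SHARED_RUNNERS_SETTINGS, from most
# restrictive to least restrictive.
def shared_runners_setting_higher_than?(other_setting)
  SHARED_RUNNERS_SETTINGS.index(shared_runners_setting) >
    SHARED_RUNNERS_SETTINGS.index(other_setting)
end
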
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 0f765d6b09b..bc50e2af373 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -175,6 +175,7 @@ RSpec.describe NotificationSetting do
:reopen_merge_request,
:close_merge_request,
:reassign_merge_request,
+ :change_reviewer_merge_request,
:merge_merge_request,
:failed_pipeline,
:success_pipeline,
diff --git a/spec/models/operations/feature_flag_spec.rb b/spec/models/operations/feature_flag_spec.rb
index db432e73355..b4e941f2856 100644
--- a/spec/models/operations/feature_flag_spec.rb
+++ b/spec/models/operations/feature_flag_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Operations::FeatureFlag do
context 'a version 1 feature flag' do
it 'is valid if associated with Operations::FeatureFlagScope models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 1,
+ feature_flag = described_class.create!({ name: 'test', project: project, version: 1,
scopes_attributes: [{ environment_scope: '*', active: false }] })
expect(feature_flag).to be_valid
@@ -33,9 +33,10 @@ RSpec.describe Operations::FeatureFlag do
it 'is invalid if associated with Operations::FeatureFlags::Strategy models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 1,
+ feature_flag = described_class.new({ name: 'test', project: project, version: 1,
strategies_attributes: [{ name: 'default', parameters: {} }] })
+ expect(feature_flag.valid?).to eq(false)
expect(feature_flag.errors.messages).to eq({
version_associations: ["version 1 feature flags may not have strategies"]
})
@@ -45,9 +46,10 @@ RSpec.describe Operations::FeatureFlag do
context 'a version 2 feature flag' do
it 'is invalid if associated with Operations::FeatureFlagScope models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 2,
+ feature_flag = described_class.new({ name: 'test', project: project, version: 2,
scopes_attributes: [{ environment_scope: '*', active: false }] })
+ expect(feature_flag.valid?).to eq(false)
expect(feature_flag.errors.messages).to eq({
version_associations: ["version 2 feature flags may not have scopes"]
})
@@ -55,7 +57,7 @@ RSpec.describe Operations::FeatureFlag do
it 'is valid if associated with Operations::FeatureFlags::Strategy models' do
project = create(:project)
- feature_flag = described_class.create({ name: 'test', project: project, version: 2,
+ feature_flag = described_class.create!({ name: 'test', project: project, version: 2,
strategies_attributes: [{ name: 'default', parameters: {} }] })
expect(feature_flag).to be_valid
@@ -75,7 +77,7 @@ RSpec.describe Operations::FeatureFlag do
it 'defaults to 1 if unspecified' do
project = create(:project)
- feature_flag = described_class.create(name: 'my_flag', project: project, active: true)
+ feature_flag = described_class.create!(name: 'my_flag', project: project, active: true)
expect(feature_flag).to be_valid
expect(feature_flag.version_before_type_cast).to eq(1)
@@ -113,14 +115,14 @@ RSpec.describe Operations::FeatureFlag do
context 'with a version 1 feature flag' do
it 'creates a default scope' do
- feature_flag = described_class.create({ name: 'test', project: project, scopes_attributes: [], version: 1 })
+ feature_flag = described_class.create!({ name: 'test', project: project, scopes_attributes: [], version: 1 })
expect(feature_flag.scopes.count).to eq(1)
expect(feature_flag.scopes.first.environment_scope).to eq('*')
end
it 'allows specifying the default scope in the parameters' do
- feature_flag = described_class.create({ name: 'test', project: project,
+ feature_flag = described_class.create!({ name: 'test', project: project,
scopes_attributes: [{ environment_scope: '*', active: false },
{ environment_scope: 'review/*', active: true }], version: 1 })
@@ -131,7 +133,7 @@ RSpec.describe Operations::FeatureFlag do
context 'with a version 2 feature flag' do
it 'does not create a default scope' do
- feature_flag = described_class.create({ name: 'test', project: project, scopes_attributes: [], version: 2 })
+ feature_flag = described_class.create!({ name: 'test', project: project, scopes_attributes: [], version: 2 })
expect(feature_flag.scopes).to eq([])
end
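
The swaps from `create` to `create!` or `new` in this file follow the usual ActiveRecord testing rule: use `create!` when the record is expected to be valid, so a silent validation failure cannot slip through, and `new` plus an explicit `valid?` check when the validation failure is the point of the example. In miniature (the attribute hashes are placeholders):

# Positive path: create! raises on an invalid record instead of quietly
# returning an unsaved one, so the example cannot pass by accident.
record = described_class.create!(valid_attributes)

# Negative path: build with new, then assert the failure explicitly.
record = described_class.new(invalid_attributes)
expect(record.valid?).to eq(false)
expect(record.errors).to be_present
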
diff --git a/spec/models/operations/feature_flags/strategy_spec.rb b/spec/models/operations/feature_flags/strategy_spec.rb
index 04e3ef26e9d..0ecb49e75f3 100644
--- a/spec/models/operations/feature_flags/strategy_spec.rb
+++ b/spec/models/operations/feature_flags/strategy_spec.rb
@@ -4,11 +4,12 @@ require 'spec_helper'
RSpec.describe Operations::FeatureFlags::Strategy do
let_it_be(:project) { create(:project) }
+ let_it_be(:feature_flag) { create(:operations_feature_flag, project: project) }
describe 'validations' do
it do
is_expected.to validate_inclusion_of(:name)
- .in_array(%w[default gradualRolloutUserId userWithId gitlabUserList])
+ .in_array(%w[default gradualRolloutUserId flexibleRollout userWithId gitlabUserList])
.with_message('strategy name is invalid')
end
@@ -19,7 +20,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'skips parameters validation' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: invalid_name, parameters: { bad: 'params' })
@@ -36,7 +36,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must have valid parameters for the strategy' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId', parameters: invalid_parameters)
@@ -45,7 +44,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'allows the parameters in any order' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { percentage: '10', groupId: 'mygroup' })
@@ -55,13 +53,12 @@ RSpec.describe Operations::FeatureFlags::Strategy do
describe 'percentage' do
where(:invalid_value) do
- [50, 40.0, { key: "value" }, "garbage", "00", "01", "101", "-1", "-10", "0100",
- "1000", "10.0", "5%", "25%", "100hi", "e100", "30m", " ", "\r\n", "\n", "\t",
- "\n10", "20\n", "\n100", "100\n", "\n ", nil]
+ [50, 40.0, { key: "value" }, "garbage", "101", "-1", "-10", "1000", "10.0", "5%", "25%",
+ "100hi", "e100", "30m", " ", "\r\n", "\n", "\t", "\n10", "20\n", "\n100", "100\n",
+ "\n ", nil]
end
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: 'mygroup', percentage: invalid_value })
@@ -75,7 +72,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: 'mygroup', percentage: valid_value })
@@ -92,7 +88,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: invalid_value, percentage: '40' })
@@ -106,7 +101,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be a string value of up to 32 lowercase characters' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: valid_value, percentage: '40' })
@@ -117,13 +111,132 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
end
+ context 'when the strategy name is flexibleRollout' do
+ valid_parameters = { rollout: '40', groupId: 'mygroup', stickiness: 'DEFAULT' }
+ where(invalid_parameters: [
+ nil,
+ {},
+ *valid_parameters.to_a.combination(1).to_a.map { |p| p.to_h },
+ *valid_parameters.to_a.combination(2).to_a.map { |p| p.to_h },
+ { **valid_parameters, userIds: '4' },
+ { **valid_parameters, extra: nil }
+ ])
+ with_them do
+ it 'must have valid parameters for the strategy' do
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: invalid_parameters)
+
+ expect(strategy.errors[:parameters]).to eq(['parameters are invalid'])
+ end
+ end
+
+ [
+ [:rollout, '10'],
+ [:stickiness, 'DEFAULT'],
+ [:groupId, 'mygroup']
+ ].permutation(3).each do |parameters|
+ it "allows the parameters in the order #{parameters.map { |p| p.first }.join(', ')}" do
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: Hash[parameters])
+
+ expect(strategy.errors[:parameters]).to be_empty
+ end
+ end
+
+ describe 'rollout' do
+ where(invalid_value: [50, 40.0, { key: "value" }, "garbage", "101", "-1", " ", "-10",
+ "1000", "10.0", "5%", "25%", "100hi", "e100", "30m", "\r\n",
+ "\n", "\t", "\n10", "20\n", "\n100", "100\n", "\n ", nil])
+ with_them do
+ it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
+ parameters = { stickiness: 'DEFAULT', groupId: 'mygroup', rollout: invalid_value }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([
+ 'rollout must be a string between 0 and 100 inclusive'
+ ])
+ end
+ end
+
+ where(valid_value: %w[0 1 10 38 100 93])
+ with_them do
+ it 'must be a string value between 0 and 100 inclusive and without a percentage sign' do
+ parameters = { stickiness: 'DEFAULT', groupId: 'mygroup', rollout: valid_value }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([])
+ end
+ end
+ end
+
+ describe 'groupId' do
+ where(invalid_value: [nil, 4, 50.0, {}, 'spaces bad', 'bad$', '%bad', '<bad', 'bad>',
+ '!bad', '.bad', 'Bad', 'bad1', "", " ", "b" * 33, "ba_d", "ba\nd"])
+ with_them do
+ it 'must be a string value of up to 32 lowercase characters' do
+ parameters = { stickiness: 'DEFAULT', groupId: invalid_value, rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq(['groupId parameter is invalid'])
+ end
+ end
+
+ where(valid_value: ["somegroup", "anothergroup", "okay", "g", "a" * 32])
+ with_them do
+ it 'must be a string value of up to 32 lowercase characters' do
+ parameters = { stickiness: 'DEFAULT', groupId: valid_value, rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([])
+ end
+ end
+ end
+
+ describe 'stickiness' do
+ where(invalid_value: [nil, " ", "default", "DEFAULT\n", "UserId", "USER", "USERID "])
+ with_them do
+ it 'must be a string representing a supported stickiness setting' do
+ parameters = { stickiness: invalid_value, groupId: 'mygroup', rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([
+ 'stickiness parameter must be DEFAULT, USERID, SESSIONID, or RANDOM'
+ ])
+ end
+ end
+
+ where(valid_value: %w[DEFAULT USERID SESSIONID RANDOM])
+ with_them do
+ it 'must be a string representing a supported stickiness setting' do
+ parameters = { stickiness: valid_value, groupId: 'mygroup', rollout: '40' }
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: parameters)
+
+ expect(strategy.errors[:parameters]).to eq([])
+ end
+ end
+ end
+ end
+
context 'when the strategy name is userWithId' do
where(:invalid_parameters) do
[nil, { userIds: 'sam', percentage: '40' }, { userIds: 'sam', some: 'param' }, { percentage: '40' }, {}]
end
with_them do
it 'must have valid parameters for the strategy' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId', parameters: invalid_parameters)
@@ -140,7 +253,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is valid with a string of comma separated values' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId', parameters: { userIds: valid_value })
@@ -155,7 +267,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'is invalid' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId', parameters: { userIds: invalid_value })
@@ -173,7 +284,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
parameters: invalid_value)
@@ -183,7 +293,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
parameters: {})
@@ -198,7 +307,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
with_them do
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
parameters: invalid_value)
@@ -208,7 +316,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'must be empty' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
parameters: {})
@@ -221,7 +328,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
describe 'associations' do
context 'when name is gitlabUserList' do
it 'is valid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
@@ -232,7 +338,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is invalid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
parameters: {})
@@ -242,7 +347,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
it 'is invalid when associated with a user list from another project' do
other_project = create(:project)
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: other_project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gitlabUserList',
@@ -255,7 +359,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when name is default' do
it 'is invalid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
@@ -266,7 +369,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'default',
parameters: {})
@@ -277,7 +379,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when name is userWithId' do
it 'is invalid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId',
@@ -288,7 +389,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'userWithId',
parameters: { userIds: 'user1' })
@@ -299,7 +399,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
context 'when name is gradualRolloutUserId' do
it 'is invalid when associated with a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
user_list = create(:operations_feature_flag_user_list, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
@@ -310,7 +409,6 @@ RSpec.describe Operations::FeatureFlags::Strategy do
end
it 'is valid without a user list' do
- feature_flag = create(:operations_feature_flag, project: project)
strategy = described_class.create(feature_flag: feature_flag,
name: 'gradualRolloutUserId',
parameters: { groupId: 'default', percentage: '10' })
@@ -318,6 +416,30 @@ RSpec.describe Operations::FeatureFlags::Strategy do
expect(strategy.errors[:user_list]).to be_empty
end
end
+
+ context 'when name is flexibleRollout' do
+ it 'is invalid when associated with a user list' do
+ user_list = create(:operations_feature_flag_user_list, project: project)
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ user_list: user_list,
+ parameters: { groupId: 'default',
+ rollout: '10',
+ stickiness: 'DEFAULT' })
+
+ expect(strategy.errors[:user_list]).to eq(['must be blank'])
+ end
+
+ it 'is valid without a user list' do
+ strategy = described_class.create(feature_flag: feature_flag,
+ name: 'flexibleRollout',
+ parameters: { groupId: 'default',
+ rollout: '10',
+ stickiness: 'DEFAULT' })
+
+ expect(strategy.errors[:user_list]).to be_empty
+ end
+ end
end
end
end
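The new flexibleRollout examples above encode three parameter rules: rollout must be a whole-number string from 0 to 100, groupId must be up to 32 lowercase letters, and stickiness must be one of the supported values. A minimal standalone sketch of those rules follows; it is illustrative only, and the module and method names are invented for the sketch rather than taken from the real Operations::FeatureFlags::Strategy validator.

# Illustrative only: restates the rules the specs assert, not GitLab's validator.
module FlexibleRolloutCheck
  STICKINESS_VALUES = %w[DEFAULT USERID SESSIONID RANDOM].freeze
  REQUIRED_KEYS = %w[groupId rollout stickiness].freeze

  def self.errors_for(parameters)
    # Exactly the three expected keys must be present -- nothing more, nothing less.
    unless parameters.is_a?(Hash) && parameters.keys.map(&:to_s).sort == REQUIRED_KEYS
      return ['parameters are invalid']
    end

    params = parameters.transform_keys(&:to_s)
    errors = []

    # rollout: a string holding a whole number from 0 to 100, no sign and no '%'.
    rollout = params['rollout']
    unless rollout.is_a?(String) && rollout.match?(/\A\d{1,3}\z/) && rollout.to_i <= 100
      errors << 'rollout must be a string between 0 and 100 inclusive'
    end

    # groupId: up to 32 lowercase ASCII letters.
    unless params['groupId'].is_a?(String) && params['groupId'].match?(/\A[a-z]{1,32}\z/)
      errors << 'groupId parameter is invalid'
    end

    # stickiness: one of the supported stickiness settings.
    unless STICKINESS_VALUES.include?(params['stickiness'])
      errors << 'stickiness parameter must be DEFAULT, USERID, SESSIONID, or RANDOM'
    end

    errors
  end
end

FlexibleRolloutCheck.errors_for(rollout: '40', groupId: 'mygroup', stickiness: 'DEFAULT') # => []
FlexibleRolloutCheck.errors_for(rollout: '5%', groupId: 'mygroup', stickiness: 'DEFAULT')
# => ["rollout must be a string between 0 and 100 inclusive"]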
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index ea1f75d04e7..6a3969802f3 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -108,6 +108,20 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value('.foobar').for(:name) }
it { is_expected.not_to allow_value('%foo%bar').for(:name) }
end
+
+ context 'generic package' do
+ subject { build_stubbed(:generic_package) }
+
+ it { is_expected.to allow_value('123').for(:name) }
+ it { is_expected.to allow_value('foo').for(:name) }
+ it { is_expected.to allow_value('foo.bar.baz-2.0-20190901.47283-1').for(:name) }
+ it { is_expected.not_to allow_value('../../foo').for(:name) }
+ it { is_expected.not_to allow_value('..\..\foo').for(:name) }
+ it { is_expected.not_to allow_value('%2f%2e%2e%2f%2essh%2fauthorized_keys').for(:name) }
+ it { is_expected.not_to allow_value('$foo/bar').for(:name) }
+ it { is_expected.not_to allow_value('my file name').for(:name) }
+ it { is_expected.not_to allow_value('!!().for(:name)().for(:name)').for(:name) }
+ end
end
describe '#version' do
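The generic-package examples above pin down which names are accepted. One plausible way to express that rule as a single anchored pattern is sketched below; the constant name and the exact pattern are assumptions for illustration, not the actual Gitlab::Regex expression.

# Allowed: letters, digits, dots, underscores, plus and hyphen; nothing else.
GENERIC_PACKAGE_NAME = /\A[a-zA-Z0-9._+-]+\z/

%w[123 foo foo.bar.baz-2.0-20190901.47283-1].all? { |n| n.match?(GENERIC_PACKAGE_NAME) }
# => true
['../../foo', '$foo/bar', 'my file name', '%2f%2e%2e%2f%2essh%2fauthorized_keys'].any? { |n| n.match?(GENERIC_PACKAGE_NAME) }
# => false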
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index bc6398de9a4..67fb11f34e0 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -199,6 +199,7 @@ RSpec.describe PlanLimits do
ci_max_artifact_size_secret_detection
ci_max_artifact_size_requirements
ci_max_artifact_size_coverage_fuzzing
+ ci_max_artifact_size_api_fuzzing
]
end
diff --git a/spec/models/project_feature_usage_spec.rb b/spec/models/project_feature_usage_spec.rb
index 908b98ee9c2..d55d41fab85 100644
--- a/spec/models/project_feature_usage_spec.rb
+++ b/spec/models/project_feature_usage_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe ProjectFeatureUsage, type: :model do
feature_usage.log_jira_dvcs_integration_usage
first_logged_at = feature_usage.jira_dvcs_cloud_last_sync_at
- Timecop.freeze(1.hour.from_now) do
+ travel_to(1.hour.from_now) do
ProjectFeatureUsage.new(project_id: project.id).log_jira_dvcs_integration_usage
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index fe971832695..20b10be8c3b 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -72,6 +72,7 @@ RSpec.describe Project do
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:fork_network_member) }
it { is_expected.to have_one(:auto_devops).class_name('ProjectAutoDevops') }
+ it { is_expected.to have_one(:tracing_setting).class_name('ProjectTracingSetting') }
it { is_expected.to have_one(:error_tracking_setting).class_name('ErrorTracking::ProjectErrorTrackingSetting') }
it { is_expected.to have_one(:project_setting) }
it { is_expected.to have_one(:alerting_setting).class_name('Alerting::ProjectAlertingSetting') }
@@ -116,6 +117,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:prometheus_alert_events) }
it { is_expected.to have_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:alert_management_alerts) }
+ it { is_expected.to have_many(:alert_management_http_integrations) }
it { is_expected.to have_many(:jira_imports) }
it { is_expected.to have_many(:metrics_users_starred_dashboards).inverse_of(:project) }
it { is_expected.to have_many(:repository_storage_moves) }
@@ -123,6 +125,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
it { is_expected.to have_many(:pipeline_artifacts) }
+ it { is_expected.to have_many(:terraform_states).class_name('Terraform::State').inverse_of(:project) }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -5812,6 +5815,38 @@ RSpec.describe Project do
end
end
+ describe 'validation #changing_shared_runners_enabled_is_allowed' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_setting, :project_shared_runners_enabled, :valid_record) do
+ 'enabled' | true | true
+ 'enabled' | false | true
+ 'disabled_with_override' | true | true
+ 'disabled_with_override' | false | true
+ 'disabled_and_unoverridable' | true | false
+ 'disabled_and_unoverridable' | false | true
+ end
+
+ with_them do
+ let(:group) { create(:group) }
+ let(:project) { build(:project, namespace: group, shared_runners_enabled: project_shared_runners_enabled) }
+
+ before do
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ end
+ end
+
+ it 'validates the configuration' do
+ expect(project.valid?).to eq(valid_record)
+
+ unless valid_record
+ expect(project.errors[:shared_runners_enabled]).to contain_exactly('cannot be enabled because parent group does not allow it')
+ end
+ end
+ end
+ end
+
describe '#mark_pages_as_deployed' do
let(:project) { create(:project) }
let(:artifacts_archive) { create(:ci_job_artifact, project: project) }
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 383fabcfffb..9f40dbb3401 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -201,6 +201,23 @@ RSpec.describe ProjectStatistics do
statistics.refresh!(only: [:commit_count])
end
end
+
+ context 'when the database is read-only' do
+ it 'does nothing' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect(statistics).not_to receive(:update_commit_count)
+ expect(statistics).not_to receive(:update_repository_size)
+ expect(statistics).not_to receive(:update_wiki_size)
+ expect(statistics).not_to receive(:update_lfs_objects_size)
+ expect(statistics).not_to receive(:update_snippets_size)
+ expect(statistics).not_to receive(:save!)
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ statistics.refresh!
+ end
+ end
end
describe '#update_commit_count' do
@@ -324,22 +341,51 @@ RSpec.describe ProjectStatistics do
describe '.increment_statistic' do
shared_examples 'a statistic that increases storage_size' do
it 'increases the statistic by that amount' do
- expect { described_class.increment_statistic(project.id, stat, 13) }
+ expect { described_class.increment_statistic(project, stat, 13) }
.to change { statistics.reload.send(stat) || 0 }
.by(13)
end
it 'increases also storage size by that amount' do
- expect { described_class.increment_statistic(project.id, stat, 20) }
+ expect { described_class.increment_statistic(project, stat, 20) }
.to change { statistics.reload.storage_size }
.by(20)
end
end
+ shared_examples 'a statistic that increases storage_size asynchronously' do
+ it 'stores the increment temporarily in Redis', :clean_gitlab_redis_shared_state do
+ described_class.increment_statistic(project, stat, 13)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ increment = redis.get(statistics.counter_key(stat))
+ expect(increment.to_i).to eq(13)
+ end
+ end
+
+ it 'schedules a worker to update the statistic and storage_size async' do
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, described_class.name, statistics.id, stat)
+
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, described_class.name, statistics.id, :storage_size)
+
+ described_class.increment_statistic(project, stat, 20)
+ end
+ end
+
context 'when adjusting :build_artifacts_size' do
let(:stat) { :build_artifacts_size }
- it_behaves_like 'a statistic that increases storage_size'
+ it_behaves_like 'a statistic that increases storage_size asynchronously'
+
+ it_behaves_like 'a statistic that increases storage_size' do
+ before do
+ stub_feature_flags(efficient_counter_attribute: false)
+ end
+ end
end
context 'when adjusting :pipeline_artifacts_size' do
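The new "increases storage_size asynchronously" examples describe a pattern where increments are buffered (the specs reference Gitlab::Redis::SharedState, CounterAttribute::WORKER_DELAY and FlushCounterIncrementsWorker) and flushed later, instead of updating the row on every call. Below is a toy in-memory model of that pattern; BufferedCounter and its methods are invented for the sketch and are not GitLab's implementation.

class BufferedCounter
  def initialize
    @buffer = Hash.new(0)    # stands in for the Redis counter keys
    @persisted = Hash.new(0) # stands in for the project_statistics row
  end

  attr_reader :buffer, :persisted

  # Buffer the increment instead of writing to the database immediately.
  # In GitLab this is also the point where a flush worker would be scheduled
  # for the stat and for :storage_size.
  def increment(stat, amount)
    @buffer[stat] += amount
    @buffer[:storage_size] += amount
  end

  # Later, a worker moves the buffered value into the persisted record.
  def flush!(stat)
    @persisted[stat] += @buffer.delete(stat) || 0
  end
end

counter = BufferedCounter.new
counter.increment(:build_artifacts_size, 13)
counter.buffer[:build_artifacts_size]    # => 13 (buffered, not yet persisted)
counter.flush!(:build_artifacts_size)
counter.persisted[:build_artifacts_size] # => 13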
diff --git a/spec/models/project_tracing_setting_spec.rb b/spec/models/project_tracing_setting_spec.rb
new file mode 100644
index 00000000000..a7e4e557b25
--- /dev/null
+++ b/spec/models/project_tracing_setting_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectTracingSetting do
+ describe '#external_url' do
+ let_it_be(:project) { create(:project) }
+
+ let(:tracing_setting) { project.build_tracing_setting }
+
+ describe 'Validations' do
+ describe 'external_url' do
+ it 'accepts a valid url' do
+ tracing_setting.external_url = 'https://gitlab.com'
+
+ expect(tracing_setting).to be_valid
+ end
+
+ it 'fails with an invalid url' do
+ tracing_setting.external_url = 'gitlab.com'
+
+ expect(tracing_setting).to be_invalid
+ end
+
+ it 'fails with a blank string' do
+ tracing_setting.external_url = nil
+
+ expect(tracing_setting).to be_invalid
+ end
+
+ it 'sanitizes the url' do
+ tracing_setting.external_url = %{https://replaceme.com/'><script>alert(document.cookie)</script>}
+
+ expect(tracing_setting).to be_valid
+ expect(tracing_setting.external_url).to eq(%{https://replaceme.com/'&gt;})
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index 960db31d488..da1fe70c891 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -50,26 +50,36 @@ RSpec.describe ResourceLabelEvent, type: :model do
end
end
- describe '#expire_etag_cache' do
- def expect_expiration(issue)
- expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
- expect(instance).to receive(:touch)
- .with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
+ context 'callbacks' do
+ describe '#usage_metrics' do
+ it 'tracks changed labels' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
+
+ subject.save!
end
end
- it 'expires resource note etag cache on event save' do
- expect_expiration(subject.issuable)
+ describe '#expire_etag_cache' do
+ def expect_expiration(issue)
+ expect_next_instance_of(Gitlab::EtagCaching::Store) do |instance|
+ expect(instance).to receive(:touch)
+ .with("/#{issue.project.namespace.to_param}/#{issue.project.to_param}/noteable/issue/#{issue.id}/notes")
+ end
+ end
- subject.save!
- end
+ it 'expires resource note etag cache on event save' do
+ expect_expiration(subject.issuable)
- it 'expires resource note etag cache on event destroy' do
- subject.save!
+ subject.save!
+ end
+
+ it 'expires resource note etag cache on event destroy' do
+ subject.save!
- expect_expiration(subject.issuable)
+ expect_expiration(subject.issuable)
- subject.destroy!
+ subject.destroy!
+ end
end
end
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
index 0a5292b2d16..c1761e5b2e8 100644
--- a/spec/models/resource_milestone_event_spec.rb
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe ResourceMilestoneEvent, type: :model do
it_behaves_like 'timebox resource event validations'
it_behaves_like 'timebox resource event states'
it_behaves_like 'timebox resource event actions'
+ it_behaves_like 'timebox resource tracks issue metrics', :milestone
describe 'associations' do
it { is_expected.to belong_to(:milestone) }
diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb
index fc6575b2db8..b8a93bdbe3b 100644
--- a/spec/models/resource_state_event_spec.rb
+++ b/spec/models/resource_state_event_spec.rb
@@ -39,4 +39,20 @@ RSpec.describe ResourceStateEvent, type: :model do
end
end
end
+
+ context 'callbacks' do
+ describe '#usage_metrics' do
+ it 'tracks closed issues' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_closed_action)
+
+ create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:closed])
+ end
+
+ it 'tracks reopened issues' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_reopened_action)
+
+ create(described_class.name.underscore.to_sym, issue: issue, state: described_class.states[:reopened])
+ end
+ end
+ end
end
diff --git a/spec/models/resource_weight_event_spec.rb b/spec/models/resource_weight_event_spec.rb
index 8a37883d933..170b302e094 100644
--- a/spec/models/resource_weight_event_spec.rb
+++ b/spec/models/resource_weight_event_spec.rb
@@ -73,4 +73,14 @@ RSpec.describe ResourceWeightEvent, type: :model do
expect(event.discussion_id).to eq('73d167c478')
end
end
+
+ context 'callbacks' do
+ describe '#usage_metrics' do
+ it 'tracks changed weights' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_weight_changed_action).with(author: user1)
+
+ create(:resource_weight_event, issue: issue1, user: user1)
+ end
+ end
+ end
end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 32e2012e284..42a12a98d63 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -320,18 +320,28 @@ RSpec.describe Service do
end
it 'sets service to inactive' do
- service = described_class.build_from_integration(project.id, integration)
+ service = described_class.build_from_integration(integration, project_id: project.id)
expect(service).to be_valid
expect(service.active).to be false
end
end
- context 'when integration is an instance' do
+ context 'when integration is an instance-level integration' do
let(:integration) { create(:jira_service, :instance) }
it 'sets inherit_from_id from integration' do
- service = described_class.build_from_integration(project.id, integration)
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service.inherit_from_id).to eq(integration.id)
+ end
+ end
+
+ context 'when integration is a group-level integration' do
+ let(:integration) { create(:jira_service, group: group, project: nil) }
+
+ it 'sets inherit_from_id from integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
expect(service.inherit_from_id).to eq(integration.id)
end
@@ -350,8 +360,22 @@ RSpec.describe Service do
end
shared_examples 'service creation from an integration' do
- it 'creates a correct service' do
- service = described_class.build_from_integration(project.id, integration)
+ it 'creates a correct service for a project integration' do
+ service = described_class.build_from_integration(integration, project_id: project.id)
+
+ expect(service).to be_active
+ expect(service.url).to eq(url)
+ expect(service.api_url).to eq(api_url)
+ expect(service.username).to eq(username)
+ expect(service.password).to eq(password)
+ expect(service.template).to eq(false)
+ expect(service.instance).to eq(false)
+ expect(service.project).to eq(project)
+ expect(service.group).to eq(nil)
+ end
+
+ it 'creates a correct service for a group integration' do
+ service = described_class.build_from_integration(integration, group_id: group.id)
expect(service).to be_active
expect(service.url).to eq(url)
@@ -360,6 +384,8 @@ RSpec.describe Service do
expect(service.password).to eq(password)
expect(service.template).to eq(false)
expect(service.instance).to eq(false)
+ expect(service.project).to eq(nil)
+ expect(service.group).to eq(group)
end
end
diff --git a/spec/models/snippet_input_action_spec.rb b/spec/models/snippet_input_action_spec.rb
index 43dc70bea98..0a9ab47f2f0 100644
--- a/spec/models/snippet_input_action_spec.rb
+++ b/spec/models/snippet_input_action_spec.rb
@@ -67,7 +67,7 @@ RSpec.describe SnippetInputAction do
let(:options) { { action: action, file_path: file_path, content: content, previous_path: previous_path } }
let(:expected_options) { options.merge(action: action.to_sym) }
- subject { described_class.new(options).to_commit_action }
+ subject { described_class.new(**options).to_commit_action }
it 'transforms attributes to commit action' do
expect(subject).to eq(expected_options)
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 95602a4de0e..30690ce2fa3 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe SnippetRepository do
it 'returns nil when files argument is empty' do
expect(snippet.repository).not_to receive(:multi_action)
- operation = snippet_repository.multi_files_action(user, [], commit_opts)
+ operation = snippet_repository.multi_files_action(user, [], **commit_opts)
expect(operation).to be_nil
end
@@ -43,7 +43,7 @@ RSpec.describe SnippetRepository do
it 'returns nil when files argument is nil' do
expect(snippet.repository).not_to receive(:multi_action)
- operation = snippet_repository.multi_files_action(user, nil, commit_opts)
+ operation = snippet_repository.multi_files_action(user, nil, **commit_opts)
expect(operation).to be_nil
end
@@ -60,7 +60,7 @@ RSpec.describe SnippetRepository do
end
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.not_to raise_error
aggregate_failures do
@@ -77,13 +77,13 @@ RSpec.describe SnippetRepository do
it 'tries to obtain an exclusive lease' do
expect(Gitlab::ExclusiveLease).to receive(:new).with("multi_files_action:#{snippet.id}", anything).and_call_original
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
it 'cancels the lease when the method has finished' do
expect(Gitlab::ExclusiveLease).to receive(:cancel).with("multi_files_action:#{snippet.id}", anything).and_call_original
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
it 'raises an error if the lease cannot be obtained' do
@@ -92,7 +92,7 @@ RSpec.describe SnippetRepository do
end
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.to raise_error(described_class::CommitError)
end
@@ -114,7 +114,7 @@ RSpec.describe SnippetRepository do
it 'infers the commit action based on the parameters if not present' do
expect(repo).to receive(:multi_action).with(user, hash_including(actions: result))
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
context 'when commit actions are present' do
@@ -128,7 +128,7 @@ RSpec.describe SnippetRepository do
user,
hash_including(actions: array_including(hash_including(action: expected_action)))))
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end
end
@@ -149,7 +149,7 @@ RSpec.describe SnippetRepository do
specify do
existing_content = blob_at(snippet, previous_path).data
- snippet_repository.multi_files_action(user, [move_action], commit_opts)
+ snippet_repository.multi_files_action(user, [move_action], **commit_opts)
blob = blob_at(snippet, new_path)
expect(blob).not_to be_nil
@@ -177,7 +177,7 @@ RSpec.describe SnippetRepository do
specify do
last_commit_id = snippet.repository.head_commit.id
- snippet_repository.multi_files_action(user, [update_action], commit_opts)
+ snippet_repository.multi_files_action(user, [update_action], **commit_opts)
expect(snippet.repository.head_commit.id).to eq last_commit_id
end
@@ -214,13 +214,13 @@ RSpec.describe SnippetRepository do
before do
expect(blob_at(snippet, default_name)).to be_nil
- snippet_repository.multi_files_action(user, [new_file], commit_opts)
+ snippet_repository.multi_files_action(user, [new_file], **commit_opts)
expect(blob_at(snippet, default_name)).to be
end
it 'reuses the existing file name' do
- snippet_repository.multi_files_action(user, [existing_file], commit_opts)
+ snippet_repository.multi_files_action(user, [existing_file], **commit_opts)
blob = blob_at(snippet, default_name)
expect(blob.data).to eq existing_file[:content]
@@ -234,7 +234,7 @@ RSpec.describe SnippetRepository do
it 'assigns a new name to the file' do
expect(blob_at(snippet, default_name)).to be_nil
- snippet_repository.multi_files_action(user, [new_file], commit_opts)
+ snippet_repository.multi_files_action(user, [new_file], **commit_opts)
blob = blob_at(snippet, default_name)
expect(blob.data).to eq new_file[:content]
@@ -246,7 +246,7 @@ RSpec.describe SnippetRepository do
before do
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.not_to raise_error
end
@@ -259,10 +259,10 @@ RSpec.describe SnippetRepository do
before do
# Pre-populate repository with 9 unnamed snippets.
- snippet_repository.multi_files_action(user, pre_populate_data, commit_opts)
+ snippet_repository.multi_files_action(user, pre_populate_data, **commit_opts)
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.not_to raise_error
end
@@ -274,7 +274,7 @@ RSpec.describe SnippetRepository do
it 'raises a path specific error' do
expect do
- snippet_repository.multi_files_action(user, data, commit_opts)
+ snippet_repository.multi_files_action(user, data, **commit_opts)
end.to raise_error(error)
end
end
diff --git a/spec/models/snippet_statistics_spec.rb b/spec/models/snippet_statistics_spec.rb
index 8def6a0bbd4..1fb4ed47169 100644
--- a/spec/models/snippet_statistics_spec.rb
+++ b/spec/models/snippet_statistics_spec.rb
@@ -75,15 +75,28 @@ RSpec.describe SnippetStatistics do
end
describe '#refresh!' do
- subject { statistics.refresh! }
-
it 'retrieves and saves statistic data from repository' do
expect(statistics).to receive(:update_commit_count)
expect(statistics).to receive(:update_file_count)
expect(statistics).to receive(:update_repository_size)
expect(statistics).to receive(:save!)
- subject
+ statistics.refresh!
+ end
+
+ context 'when the database is read-only' do
+ it 'does nothing' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+
+ expect(statistics).not_to receive(:update_commit_count)
+ expect(statistics).not_to receive(:update_file_count)
+ expect(statistics).not_to receive(:update_repository_size)
+ expect(statistics).not_to receive(:save!)
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ statistics.refresh!
+ end
end
end
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index 01ae80a61d1..1d99d103bb8 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -15,7 +15,24 @@ RSpec.describe Terraform::State do
it { is_expected.to validate_presence_of(:project_id) }
before do
- stub_terraform_state_object_storage(Terraform::StateUploader)
+ stub_terraform_state_object_storage
+ end
+
+ describe 'scopes' do
+ describe '.ordered_by_name' do
+ let_it_be(:project) { create(:project) }
+ let(:names) { %w(state_d state_b state_a state_c) }
+
+ subject { described_class.ordered_by_name }
+
+ before do
+ names.each do |name|
+ create(:terraform_state, project: project, name: name)
+ end
+ end
+
+ it { expect(subject.map(&:name)).to eq(names.sort) }
+ end
end
describe '#file' do
@@ -43,7 +60,7 @@ RSpec.describe Terraform::State do
context 'when file is stored locally' do
before do
- stub_terraform_state_object_storage(Terraform::StateUploader, enabled: false)
+ stub_terraform_state_object_storage(enabled: false)
end
it_behaves_like 'mounted file in local store'
diff --git a/spec/models/terraform/state_version_spec.rb b/spec/models/terraform/state_version_spec.rb
index 72dd29e1571..cc5ea87159d 100644
--- a/spec/models/terraform/state_version_spec.rb
+++ b/spec/models/terraform/state_version_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Terraform::StateVersion do
subject { create(:terraform_state_version) }
before do
- stub_terraform_state_object_storage(Terraform::StateUploader)
+ stub_terraform_state_object_storage
end
describe '#file' do
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 44e81455a67..a9c4c6680cd 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -200,26 +200,42 @@ RSpec.describe Todo do
describe '#self_assigned?' do
let(:user_1) { build(:user) }
- before do
- subject.user = user_1
- subject.author = user_1
- subject.action = Todo::ASSIGNED
- end
+ context 'when self_added' do
+ before do
+ subject.user = user_1
+ subject.author = user_1
+ end
- it 'is true when todo is ASSIGNED and self_added' do
- expect(subject).to be_self_assigned
- end
+ it 'returns true for ASSIGNED' do
+ subject.action = Todo::ASSIGNED
+
+ expect(subject).to be_self_assigned
+ end
- it 'is false when the todo is not ASSIGNED' do
- subject.action = Todo::MENTIONED
+ it 'returns true for REVIEW_REQUESTED' do
+ subject.action = Todo::REVIEW_REQUESTED
- expect(subject).not_to be_self_assigned
+ expect(subject).to be_self_assigned
+ end
+
+ it 'returns false for other action' do
+ subject.action = Todo::MENTIONED
+
+ expect(subject).not_to be_self_assigned
+ end
end
- it 'is false when todo is not self_added' do
- subject.author = build(:user)
+ context 'when todo is not self_added' do
+ before do
+ subject.user = user_1
+ subject.author = build(:user)
+ end
- expect(subject).not_to be_self_assigned
+ it 'returns false' do
+ subject.action = Todo::ASSIGNED
+
+ expect(subject).not_to be_self_assigned
+ end
end
end
@@ -427,7 +443,7 @@ RSpec.describe Todo do
it 'updates updated_at' do
create(:todo, :pending)
- Timecop.freeze(1.day.from_now) do
+ travel_to(1.day.from_now) do
expected_update_date = Time.current.utc
ids = described_class.batch_update(state: :done)
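Several hunks in this diff (project_feature_usage_spec, todo_spec, user_spec) replace Timecop.freeze with travel_to from ActiveSupport::Testing::TimeHelpers. A minimal usage sketch outside of RSpec, assuming only ActiveSupport is available:

require 'active_support/all'
require 'active_support/testing/time_helpers'

include ActiveSupport::Testing::TimeHelpers

travel_to(1.hour.from_now) do
  # Time.now, Time.current and Date.today are stubbed to the travelled time
  # for the duration of the block, then restored automatically afterwards.
  puts Time.current
end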
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 1841288cd4b..af5614ba85e 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -3902,7 +3902,7 @@ RSpec.describe User do
it 'changes the namespace (just to compare to when username is not changed)' do
expect do
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
user.update!(username: new_username)
end
end.to change { user.namespace.updated_at }
@@ -4330,28 +4330,32 @@ RSpec.describe User do
describe '#required_terms_not_accepted?' do
let(:user) { build(:user) }
+ let(:project_bot) { create(:user, :project_bot) }
subject { user.required_terms_not_accepted? }
context "when terms are not enforced" do
- it { is_expected.to be_falsy }
+ it { is_expected.to be_falsey }
end
- context "when terms are enforced and accepted by the user" do
+ context "when terms are enforced" do
before do
enforce_terms
- accept_terms(user)
end
- it { is_expected.to be_falsy }
- end
+ it "is not accepted by the user" do
+ expect(subject).to be_truthy
+ end
- context "when terms are enforced but the user has not accepted" do
- before do
- enforce_terms
+ it "is accepted by the user" do
+ accept_terms(user)
+
+ expect(subject).to be_falsey
end
- it { is_expected.to be_truthy }
+ it "auto accepts the term for project bots" do
+ expect(project_bot.required_terms_not_accepted?).to be_falsey
+ end
end
end
@@ -4895,7 +4899,7 @@ RSpec.describe User do
user.block
end
- it { is_expected.to eq User::BLOCKED_MESSAGE }
+ it { is_expected.to eq :blocked }
end
context 'when user is an internal user' do
@@ -4903,7 +4907,7 @@ RSpec.describe User do
user.update(user_type: :ghost)
end
- it { is_expected.to be User::LOGIN_FORBIDDEN }
+ it { is_expected.to be :forbidden }
end
context 'when user is locked' do
diff --git a/spec/models/wiki_directory_spec.rb b/spec/models/wiki_directory_spec.rb
index 4cac90786eb..9b6cec99ddb 100644
--- a/spec/models/wiki_directory_spec.rb
+++ b/spec/models/wiki_directory_spec.rb
@@ -3,43 +3,97 @@
require 'spec_helper'
RSpec.describe WikiDirectory do
- describe 'validations' do
- subject { build(:wiki_directory) }
+ subject(:directory) { build(:wiki_directory) }
+ describe 'validations' do
it { is_expected.to validate_presence_of(:slug) }
end
+ describe '.group_pages' do
+ let_it_be(:toplevel1) { build(:wiki_page, title: 'aaa-toplevel1') }
+ let_it_be(:toplevel2) { build(:wiki_page, title: 'zzz-toplevel2') }
+ let_it_be(:toplevel3) { build(:wiki_page, title: 'zzz-toplevel3') }
+ let_it_be(:child1) { build(:wiki_page, title: 'parent1/child1') }
+ let_it_be(:child2) { build(:wiki_page, title: 'parent1/child2') }
+ let_it_be(:child3) { build(:wiki_page, title: 'parent2/child3') }
+ let_it_be(:grandchild1) { build(:wiki_page, title: 'parent1/subparent/grandchild1') }
+ let_it_be(:grandchild2) { build(:wiki_page, title: 'parent1/subparent/grandchild2') }
+
+ it 'returns a nested array of entries' do
+ entries = described_class.group_pages(
+ [toplevel1, toplevel2, toplevel3, child1, child2, child3, grandchild1, grandchild2].sort_by(&:title)
+ )
+
+ expect(entries).to match([
+ toplevel1,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1', entries: [
+ child1,
+ child2,
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent1/subparent',
+ entries: [grandchild1, grandchild2]
+ )
+ )
+ ]
+ )
+ ),
+ a_kind_of(WikiDirectory).and(
+ having_attributes(
+ slug: 'parent2',
+ entries: [child3]
+ )
+ ),
+ toplevel2,
+ toplevel3
+ ])
+ end
+ end
+
describe '#initialize' do
- context 'when there are pages' do
- let(:pages) { [build(:wiki_page)] }
- let(:directory) { described_class.new('/path_up_to/dir', pages) }
+ context 'when there are entries' do
+ let(:entries) { [build(:wiki_page)] }
+ let(:directory) { described_class.new('/path_up_to/dir', entries) }
it 'sets the slug attribute' do
expect(directory.slug).to eq('/path_up_to/dir')
end
- it 'sets the pages attribute' do
- expect(directory.pages).to eq(pages)
+ it 'sets the entries attribute' do
+ expect(directory.entries).to eq(entries)
end
end
- context 'when there are no pages' do
+ context 'when there are no entries' do
let(:directory) { described_class.new('/path_up_to/dir') }
it 'sets the slug attribute' do
expect(directory.slug).to eq('/path_up_to/dir')
end
- it 'sets the pages attribute to an empty array' do
- expect(directory.pages).to eq([])
+ it 'sets the entries attribute to an empty array' do
+ expect(directory.entries).to eq([])
end
end
end
+ describe '#title' do
+ it 'returns the basename of the directory, with hyphens replaced by spaces' do
+ directory.slug = 'parent'
+ expect(directory.title).to eq('parent')
+
+ directory.slug = 'parent/child'
+ expect(directory.title).to eq('child')
+
+ directory.slug = 'parent/child-foo'
+ expect(directory.title).to eq('child foo')
+ end
+ end
+
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- directory = build(:wiki_directory)
-
expect(directory.to_partial_path).to eq('../shared/wikis/wiki_directory')
end
end
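The new WikiDirectory.group_pages examples above express a tree-building pass over a title-sorted flat page list: top-level pages stay in place and nested pages collapse into per-prefix directories. A self-contained sketch of that kind of grouping follows; the Page/Directory structs and the helper itself are simplifications for illustration, not the production WikiDirectory code.

Page = Struct.new(:slug)
Directory = Struct.new(:slug, :entries)

# Fold a flat, slug-sorted list of pages into top-level pages plus nested
# directories, in the spirit of the .group_pages expectations above.
def group_pages(pages)
  top = []
  dirs = {}

  pages.each do |page|
    parts = page.slug.split('/')

    if parts.size == 1
      top << page
      next
    end

    # Create (or reuse) one Directory per path prefix, nesting as we go,
    # then file the page under its innermost directory.
    parent_entries = top
    parts[0..-2].each_index do |i|
      prefix = parts[0..i].join('/')
      dir = dirs[prefix] ||= Directory.new(prefix, []).tap { |d| parent_entries << d }
      parent_entries = dir.entries
    end
    parent_entries << page
  end

  top
end

pages = %w[aaa parent1/child1 parent1/subparent/grandchild1 zzz].map { |slug| Page.new(slug) }
group_pages(pages).map(&:slug)
# => ["aaa", "parent1", "zzz"]   ("parent1" is a Directory whose entries nest further)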
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index aa8b9ce58b9..be94eca550c 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -4,16 +4,25 @@ require "spec_helper"
RSpec.describe WikiPage do
let_it_be(:user) { create(:user) }
- let(:container) { create(:project, :wiki_repo) }
- let(:wiki) { Wiki.for_container(container, user) }
- let(:new_page) { build(:wiki_page, wiki: wiki, title: 'test page', content: 'test content') }
+ let_it_be(:container) { create(:project) }
- let(:existing_page) do
- create(:wiki_page, wiki: wiki, title: 'test page', content: 'test content', message: 'test commit')
- wiki.find_page('test page')
+ def create_wiki_page(attrs = {})
+ page = build_wiki_page(attrs)
+
+ page.create(message: (attrs[:message] || 'test commit'))
+
+ container.wiki.find_page(page.slug)
end
- subject { new_page }
+ def build_wiki_page(attrs = {})
+ wiki_page_attrs = { container: container, content: 'test content' }.merge(attrs)
+
+ build(:wiki_page, wiki_page_attrs)
+ end
+
+ def wiki
+ container.wiki
+ end
def disable_front_matter
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => false)
@@ -23,92 +32,16 @@ RSpec.describe WikiPage do
stub_feature_flags(Gitlab::WikiPages::FrontMatterParser::FEATURE_FLAG => thing)
end
- describe '.group_by_directory' do
- context 'when there are no pages' do
- it 'returns an empty array' do
- expect(described_class.group_by_directory(nil)).to eq([])
- expect(described_class.group_by_directory([])).to eq([])
- end
- end
-
- context 'when there are pages' do
- before do
- wiki.create_page('dir_1/dir_1_1/page_3', 'content')
- wiki.create_page('page_1', 'content')
- wiki.create_page('dir_1/page_2', 'content')
- wiki.create_page('dir_2', 'page with dir name')
- wiki.create_page('dir_2/page_5', 'content')
- wiki.create_page('page_6', 'content')
- wiki.create_page('dir_2/page_4', 'content')
- end
-
- let(:page_1) { wiki.find_page('page_1') }
- let(:page_6) { wiki.find_page('page_6') }
- let(:page_dir_2) { wiki.find_page('dir_2') }
-
- let(:dir_1) do
- WikiDirectory.new('dir_1', [wiki.find_page('dir_1/page_2')])
- end
-
- let(:dir_1_1) do
- WikiDirectory.new('dir_1/dir_1_1', [wiki.find_page('dir_1/dir_1_1/page_3')])
- end
-
- let(:dir_2) do
- pages = [wiki.find_page('dir_2/page_5'),
- wiki.find_page('dir_2/page_4')]
- WikiDirectory.new('dir_2', pages)
- end
+ # Use for groups of tests that do not modify their `subject`.
+ #
+ # include_context 'subject is persisted page', title: 'my title'
+ shared_context 'subject is persisted page' do |attrs = {}|
+ let_it_be(:persisted_page) { create_wiki_page(attrs) }
- describe "#list_pages" do
- context 'sort by title' do
- let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages) }
- let(:expected_grouped_entries) { [dir_1_1, dir_1, page_dir_2, dir_2, page_1, page_6] }
-
- it 'returns an array with pages and directories' do
- grouped_entries.each_with_index do |page_or_dir, i|
- expected_page_or_dir = expected_grouped_entries[i]
- expected_slugs = get_slugs(expected_page_or_dir)
- slugs = get_slugs(page_or_dir)
-
- expect(slugs).to match_array(expected_slugs)
- end
- end
- end
-
- context 'sort by created_at' do
- let(:grouped_entries) { described_class.group_by_directory(wiki.list_pages(sort: 'created_at')) }
- let(:expected_grouped_entries) { [dir_1_1, page_1, dir_1, page_dir_2, dir_2, page_6] }
-
- it 'returns an array with pages and directories' do
- grouped_entries.each_with_index do |page_or_dir, i|
- expected_page_or_dir = expected_grouped_entries[i]
- expected_slugs = get_slugs(expected_page_or_dir)
- slugs = get_slugs(page_or_dir)
-
- expect(slugs).to match_array(expected_slugs)
- end
- end
- end
-
- it 'returns an array with retained order with directories at the top' do
- expected_order = ['dir_1/dir_1_1/page_3', 'dir_1/page_2', 'dir_2', 'dir_2/page_4', 'dir_2/page_5', 'page_1', 'page_6']
-
- grouped_entries = described_class.group_by_directory(wiki.list_pages)
-
- actual_order =
- grouped_entries.flat_map do |page_or_dir|
- get_slugs(page_or_dir)
- end
- expect(actual_order).to eq(expected_order)
- end
- end
- end
+ subject { persisted_page }
end
describe '#front_matter' do
- let_it_be(:project) { create(:project) }
- let(:container) { project }
let(:wiki_page) { create(:wiki_page, container: container, content: content) }
shared_examples 'a page without front-matter' do
@@ -230,14 +163,14 @@ RSpec.describe WikiPage do
describe "#initialize" do
context "when initialized with an existing page" do
- subject { existing_page }
+ include_context 'subject is persisted page', title: 'test initialization'
it "sets the slug attribute" do
- expect(subject.slug).to eq("test-page")
+ expect(subject.slug).to eq("test-initialization")
end
it "sets the title attribute" do
- expect(subject.title).to eq("test page")
+ expect(subject.title).to eq("test initialization")
end
it "sets the formatted content attribute" do
@@ -259,6 +192,8 @@ RSpec.describe WikiPage do
end
describe "validations" do
+ subject { build_wiki_page }
+
it "validates presence of title" do
subject.attributes.delete(:title)
@@ -305,7 +240,7 @@ RSpec.describe WikiPage do
end
context 'with an existing page exceeding the limit' do
- subject { existing_page }
+ include_context 'subject is persisted page'
before do
subject
@@ -414,18 +349,22 @@ RSpec.describe WikiPage do
describe "#create" do
let(:attributes) do
{
- title: "Index",
+ title: SecureRandom.hex,
content: "Home Page",
format: "markdown",
message: 'Custom Commit Message'
}
end
+ let(:title) { attributes[:title] }
+
+ subject { build_wiki_page }
+
context "with valid attributes" do
it "saves the wiki page" do
subject.create(attributes)
- expect(wiki.find_page("Index")).not_to be_nil
+ expect(wiki.find_page(title)).not_to be_nil
end
it "returns true" do
@@ -435,7 +374,7 @@ RSpec.describe WikiPage do
it 'saves the wiki page with message' do
subject.create(attributes)
- expect(wiki.find_page("Index").message).to eq 'Custom Commit Message'
+ expect(wiki.find_page(title).message).to eq 'Custom Commit Message'
end
it 'if the title is preceded by a / it is removed' do
@@ -447,9 +386,7 @@ RSpec.describe WikiPage do
context "with invalid attributes" do
it 'does not create the page' do
- subject.create(title: '')
-
- expect(wiki.find_page('New Page')).to be_nil
+ expect { subject.create(title: '') }.not_to change { wiki.list_pages.length }
end
end
end
@@ -458,46 +395,40 @@ RSpec.describe WikiPage do
let(:title) { 'Index v1.2.3' }
describe "#create" do
- let(:attributes) { { title: title, content: "Home Page", format: "markdown" } }
-
- context "with valid attributes" do
- it "saves the wiki page" do
- subject.create(attributes)
+ subject { build_wiki_page }
- expect(wiki.find_page(title)).not_to be_nil
- end
+ it "saves the wiki page and returns true", :aggregate_failures do
+ attributes = { title: title, content: "Home Page", format: "markdown" }
- it "returns true" do
- expect(subject.create(attributes)).to eq(true)
- end
+ expect(subject.create(attributes)).to eq(true)
+ expect(wiki.find_page(title)).not_to be_nil
end
end
describe '#update' do
- subject { create(:wiki_page, wiki: wiki, title: title) }
+ subject { create_wiki_page(title: title) }
+
+ it 'updates the content of the page and returns true', :aggregate_failures do
+ expect(subject.update(content: 'new content')).to be_truthy
- it 'updates the content of the page' do
- subject.update(content: 'new content')
page = wiki.find_page(title)
expect([subject.content, page.content]).to all(eq('new content'))
end
-
- it "returns true" do
- expect(subject.update(content: "more content")).to be_truthy
- end
end
end
describe "#update" do
- subject { existing_page }
+ let!(:original_title) { subject.title }
+
+ subject { create_wiki_page }
context "with valid attributes" do
it "updates the content of the page" do
new_content = "new content"
subject.update(content: new_content)
- page = wiki.find_page('test page')
+ page = wiki.find_page(original_title)
expect([subject.content, page.content]).to all(eq("new content"))
end
@@ -514,10 +445,9 @@ RSpec.describe WikiPage do
describe 'updating front_matter' do
shared_examples 'able to update front-matter' do
it 'updates the wiki-page front-matter' do
- title = subject.title
content = subject.content
subject.update(front_matter: { slugs: ['x'] })
- page = wiki.find_page(title)
+ page = wiki.find_page(original_title)
expect([subject, page]).to all(
have_attributes(
@@ -566,10 +496,9 @@ RSpec.describe WikiPage do
end
it 'updates the wiki-page front-matter and content together' do
- title = subject.title
content = 'totally new content'
subject.update(content: content, front_matter: { slugs: ['x'] })
- page = wiki.find_page(title)
+ page = wiki.find_page(original_title)
expect([subject, page]).to all(
have_attributes(
@@ -598,11 +527,11 @@ RSpec.describe WikiPage do
context 'when renaming a page' do
it 'raises an error if the page already exists' do
- wiki.create_page('Existing Page', 'content')
+ existing_page = create_wiki_page
- expect { subject.update(title: 'Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(subject.title).to eq 'test page'
- expect(subject.content).to eq 'new_content'
+ expect { subject.update(title: existing_page.title, content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
+ expect(subject.title).to eq original_title
+ expect(subject.content).to eq 'new_content' # We don't revert the content
end
it 'updates the content and rename the file' do
@@ -623,7 +552,7 @@ RSpec.describe WikiPage do
wiki.create_page('foo/Existing Page', 'content')
expect { subject.update(title: 'foo/Existing Page', content: 'new_content') }.to raise_error(WikiPage::PageRenameError)
- expect(subject.title).to eq 'test page'
+ expect(subject.title).to eq original_title
expect(subject.content).to eq 'new_content'
end
@@ -639,20 +568,22 @@ RSpec.describe WikiPage do
expect(page.content).to eq new_content
end
- context 'in subdir' do
- subject { create(:wiki_page, wiki: wiki, title: 'foo/Existing Page') }
-
+ describe 'in subdir' do
it 'moves the page to the root folder if the title is preceded by /' do
- expect(subject.slug).to eq 'foo/Existing-Page'
- expect(subject.update(title: '/Existing Page', content: 'new_content')).to be_truthy
- expect(subject.slug).to eq 'Existing-Page'
+ page = create_wiki_page(title: 'foo/Existing Page')
+
+ expect(page.slug).to eq 'foo/Existing-Page'
+ expect(page.update(title: '/Existing Page', content: 'new_content')).to be_truthy
+ expect(page.slug).to eq 'Existing-Page'
end
it 'does nothing if it has the same title' do
- original_path = subject.slug
+ page = create_wiki_page(title: 'foo/Another Existing Page')
- expect(subject.update(title: 'Existing Page', content: 'new_content')).to be_truthy
- expect(subject.slug).to eq original_path
+ original_path = page.slug
+
+ expect(page.update(title: 'Another Existing Page', content: 'new_content')).to be_truthy
+ expect(page.slug).to eq original_path
end
end
@@ -660,7 +591,7 @@ RSpec.describe WikiPage do
it 'does nothing if the title is preceded by /' do
original_path = subject.slug
- expect(subject.update(title: '/test page', content: 'new_content')).to be_truthy
+ expect(subject.update(title: "/#{subject.title}", content: 'new_content')).to be_truthy
expect(subject.slug).to eq original_path
end
end
@@ -671,7 +602,7 @@ RSpec.describe WikiPage do
expect(subject.update(title: '', content: 'new_content')).to be_falsey
expect(subject.content).to eq 'new_content'
- page = wiki.find_page('test page')
+ page = wiki.find_page(original_title)
expect(page.content).to eq 'test content'
end
@@ -679,21 +610,17 @@ RSpec.describe WikiPage do
end
describe "#delete" do
- subject { existing_page }
-
- it "deletes the page" do
- subject.delete
-
- expect(wiki.list_pages).to be_empty
- end
+ it "deletes the page and returns true", :aggregate_failures do
+ page = create_wiki_page
- it "returns true" do
- expect(subject.delete).to eq(true)
+ expect do
+ expect(page.delete).to eq(true)
+ end.to change { wiki.list_pages.length }.by(-1)
end
end
describe "#versions" do
- subject { existing_page }
+ include_context 'subject is persisted page'
it "returns an array of all commits for the page" do
3.times { |i| subject.update(content: "content #{i}") }
@@ -709,19 +636,21 @@ RSpec.describe WikiPage do
describe '#title_changed?' do
using RSpec::Parameterized::TableSyntax
+ let_it_be(:unsaved_page) { build_wiki_page(title: 'test page') }
+ let_it_be(:existing_page) { create_wiki_page(title: 'test page') }
+ let_it_be(:directory_page) { create_wiki_page(title: 'parent directory/child page') }
+ let_it_be(:page_with_special_characters) { create_wiki_page(title: 'test+page') }
let(:untitled_page) { described_class.new(wiki) }
- let(:directory_page) { create(:wiki_page, title: 'parent directory/child page') }
- let(:page_with_special_characters) { create(:wiki_page, title: 'test+page') }
where(:page, :title, :changed) do
:untitled_page | nil | false
:untitled_page | 'new title' | true
- :new_page | nil | true
- :new_page | 'test page' | true
- :new_page | 'test-page' | true
- :new_page | 'test+page' | true
- :new_page | 'new title' | true
+ :unsaved_page | nil | true
+ :unsaved_page | 'test page' | true
+ :unsaved_page | 'test-page' | true
+ :unsaved_page | 'test+page' | true
+ :unsaved_page | 'new title' | true
:existing_page | nil | false
:existing_page | 'test page' | false
@@ -764,7 +693,7 @@ RSpec.describe WikiPage do
describe '#content_changed?' do
context 'with a new page' do
- subject { new_page }
+ subject { build_wiki_page }
it 'returns true if content is set' do
subject.attributes[:content] = 'new'
@@ -780,7 +709,7 @@ RSpec.describe WikiPage do
end
context 'with an existing page' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns false' do
expect(subject.content_changed?).to be(false)
@@ -816,17 +745,21 @@ RSpec.describe WikiPage do
describe '#path' do
it 'returns the path when persisted' do
- expect(existing_page.path).to eq('test-page.md')
+ existing_page = create_wiki_page(title: 'path test')
+
+ expect(existing_page.path).to eq('path-test.md')
end
it 'returns nil when not persisted' do
- expect(new_page.path).to be_nil
+ unsaved_page = build_wiki_page(title: 'path test')
+
+ expect(unsaved_page.path).to be_nil
end
end
describe '#directory' do
context 'when the page is at the root directory' do
- subject { existing_page }
+ include_context 'subject is persisted page', title: 'directory test'
it 'returns an empty string' do
expect(subject.directory).to eq('')
@@ -834,7 +767,7 @@ RSpec.describe WikiPage do
end
context 'when the page is inside an actual directory' do
- subject { create(:wiki_page, title: 'dir_1/dir_1_1/file') }
+ include_context 'subject is persisted page', title: 'dir_1/dir_1_1/directory test'
it 'returns the full directory hierarchy' do
expect(subject.directory).to eq('dir_1/dir_1_1')
@@ -843,7 +776,7 @@ RSpec.describe WikiPage do
end
describe '#historical?' do
- subject { existing_page }
+ include_context 'subject is persisted page'
let(:old_version) { subject.versions.last.id }
let(:old_page) { wiki.find_page(subject.title, old_version) }
@@ -883,22 +816,22 @@ RSpec.describe WikiPage do
describe '#persisted?' do
it 'returns true for a persisted page' do
- expect(existing_page).to be_persisted
+ expect(create_wiki_page).to be_persisted
end
it 'returns false for an unpersisted page' do
- expect(new_page).not_to be_persisted
+ expect(build_wiki_page).not_to be_persisted
end
end
describe '#to_partial_path' do
it 'returns the relative path to the partial to be used' do
- expect(subject.to_partial_path).to eq('../shared/wikis/wiki_page')
+ expect(build_wiki_page.to_partial_path).to eq('../shared/wikis/wiki_page')
end
end
describe '#==' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns true for identical wiki page' do
expect(subject).to eq(subject)
@@ -906,7 +839,7 @@ RSpec.describe WikiPage do
it 'returns true for updated wiki page' do
subject.update(content: "Updated content")
- updated_page = wiki.find_page(existing_page.slug)
+ updated_page = wiki.find_page(subject.slug)
expect(updated_page).not_to be_nil
expect(updated_page).to eq(subject)
@@ -921,7 +854,7 @@ RSpec.describe WikiPage do
end
it 'returns false for page with different slug on same container' do
- other_page = create(:wiki_page, container: subject.container)
+ other_page = create_wiki_page
expect(subject.slug).not_to eq(other_page.slug)
expect(subject.container).to eq(other_page.container)
@@ -929,7 +862,7 @@ RSpec.describe WikiPage do
end
it 'returns false for page with the same slug on a different container' do
- other_page = create(:wiki_page, title: existing_page.slug)
+ other_page = create(:wiki_page, title: subject.slug)
expect(subject.slug).to eq(other_page.slug)
expect(subject.container).not_to eq(other_page.container)
@@ -938,7 +871,7 @@ RSpec.describe WikiPage do
end
describe '#last_commit_sha' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns commit sha' do
expect(subject.last_commit_sha).to eq subject.last_version.sha
@@ -948,13 +881,15 @@ RSpec.describe WikiPage do
last_commit_sha_before_update = subject.last_commit_sha
subject.update(content: "new content")
- page = wiki.find_page('test page')
+ page = wiki.find_page(subject.title)
expect(page.last_commit_sha).not_to eq last_commit_sha_before_update
end
end
describe '#hook_attrs' do
+ subject { build_wiki_page }
+
it 'adds absolute urls for images in the content' do
subject.attributes[:content] = 'test![WikiPage_Image](/uploads/abc/WikiPage_Image.png)'
@@ -965,19 +900,21 @@ RSpec.describe WikiPage do
describe '#version_commit_timestamp' do
context 'for a new page' do
it 'returns nil' do
- expect(new_page.version_commit_timestamp).to be_nil
+ expect(build_wiki_page.version_commit_timestamp).to be_nil
end
end
context 'for page that exists' do
it 'returns the timestamp of the commit' do
+ existing_page = create_wiki_page
+
expect(existing_page.version_commit_timestamp).to eq(existing_page.version.commit.committed_date)
end
end
end
describe '#diffs' do
- subject { existing_page }
+ include_context 'subject is persisted page'
it 'returns a diff instance' do
diffs = subject.diffs(foo: 'bar')
@@ -993,14 +930,4 @@ RSpec.describe WikiPage do
)
end
end
-
- private
-
- def get_slugs(page_or_dir)
- if page_or_dir.is_a? WikiPage
- [page_or_dir.slug]
- else
- page_or_dir.pages.present? ? page_or_dir.pages.map(&:slug) : []
- end
- end
end
diff --git a/spec/policies/design_management/design_policy_spec.rb b/spec/policies/design_management/design_policy_spec.rb
index 5a74d979ef3..117279d1638 100644
--- a/spec/policies/design_management/design_policy_spec.rb
+++ b/spec/policies/design_management/design_policy_spec.rb
@@ -71,6 +71,11 @@ RSpec.describe DesignManagement::DesignPolicy do
end
end
+ shared_examples_for "read-only design abilities" do
+ it { is_expected.to be_allowed(*guest_design_abilities) }
+ it { is_expected.to be_disallowed(*developer_design_abilities) }
+ end
+
shared_examples_for "design abilities available for members" do
context "for owners" do
let(:current_user) { owner }
@@ -86,8 +91,7 @@ RSpec.describe DesignManagement::DesignPolicy do
end
context "when admin mode disabled" do
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
end
@@ -106,16 +110,10 @@ RSpec.describe DesignManagement::DesignPolicy do
context "for reporters" do
let(:current_user) { reporter }
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
end
- shared_examples_for "read-only design abilities" do
- it { is_expected.to be_allowed(:read_design) }
- it { is_expected.to be_disallowed(:create_design, :destroy_design) }
- end
-
context "when DesignManagement is not enabled" do
before do
enable_design_management(false)
@@ -135,15 +133,13 @@ RSpec.describe DesignManagement::DesignPolicy do
let_it_be(:project) { create(:project, :private) }
let(:current_user) { guest }
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
context "for anonymous users in public projects" do
let(:current_user) { nil }
- it { is_expected.to be_allowed(*guest_design_abilities) }
- it { is_expected.to be_disallowed(*developer_design_abilities) }
+ it_behaves_like "read-only design abilities"
end
context "when the issue is confidential" do
@@ -164,20 +160,6 @@ RSpec.describe DesignManagement::DesignPolicy do
end
end
- context "when the issue is locked" do
- let_it_be(:issue) { create(:issue, :locked, project: project) }
- let(:current_user) { owner }
-
- it_behaves_like "read-only design abilities"
- end
-
- context "when the issue has moved" do
- let_it_be(:issue) { create(:issue, project: project, moved_to: create(:issue)) }
- let(:current_user) { owner }
-
- it_behaves_like "read-only design abilities"
- end
-
context "when the project is archived" do
let_it_be(:project) { create(:project, :public, :archived) }
let_it_be(:issue) { create(:issue, project: project) }
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 6cd1c201c62..e483598c47a 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -229,12 +229,6 @@ RSpec.describe GlobalPolicy do
it { is_expected.not_to be_allowed(:access_api) }
end
-
- it 'when `inactive_policy_condition` feature flag is turned off' do
- stub_feature_flags(inactive_policy_condition: false)
-
- is_expected.to be_allowed(:access_api)
- end
end
end
@@ -321,12 +315,6 @@ RSpec.describe GlobalPolicy do
end
it { is_expected.not_to be_allowed(:access_git) }
-
- it 'when `inactive_policy_condition` feature flag is turned off' do
- stub_feature_flags(inactive_policy_condition: false)
-
- is_expected.to be_allowed(:access_git)
- end
end
context 'when terms are enforced' do
@@ -403,12 +391,6 @@ RSpec.describe GlobalPolicy do
end
it { is_expected.not_to be_allowed(:use_slash_commands) }
-
- it 'when `inactive_policy_condition` feature flag is turned off' do
- stub_feature_flags(inactive_policy_condition: false)
-
- is_expected.to be_allowed(:use_slash_commands)
- end
end
context 'when access locked' do
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index dbe444acb58..fecf5f3e4f8 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -812,4 +812,74 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_disallowed(:create_jira_connect_subscription) }
end
end
+
+ describe 'read_package' do
+ context 'admin' do
+ let(:current_user) { admin }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:read_package) }
+ end
+
+ context 'with guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:read_package) }
+ end
+
+ context 'with non member' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:read_package) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:read_package) }
+ end
+ end
+
+ context 'deploy token access' do
+ let!(:group_deploy_token) do
+ create(:group_deploy_token, group: group, deploy_token: deploy_token)
+ end
+
+ subject { described_class.new(deploy_token, group) }
+
+ context 'a deploy token with read_package_registry scope' do
+ let(:deploy_token) { create(:deploy_token, :group, read_package_registry: true) }
+
+ it { is_expected.to be_allowed(:read_package) }
+ it { is_expected.to be_allowed(:read_group) }
+ it { is_expected.to be_disallowed(:create_package) }
+ end
+
+ context 'a deploy token with write_package_registry scope' do
+ let(:deploy_token) { create(:deploy_token, :group, write_package_registry: true) }
+
+ it { is_expected.to be_allowed(:create_package) }
+ it { is_expected.to be_allowed(:read_group) }
+ it { is_expected.to be_disallowed(:destroy_package) }
+ end
+ end
+
+ it_behaves_like 'Self-managed Core resource access tokens'
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 0c457148b4d..d66ef81efca 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -941,4 +941,6 @@ RSpec.describe ProjectPolicy do
end
end
end
+
+ it_behaves_like 'Self-managed Core resource access tokens'
end
diff --git a/spec/policies/terraform/state_policy_spec.rb b/spec/policies/terraform/state_policy_spec.rb
new file mode 100644
index 00000000000..82152920997
--- /dev/null
+++ b/spec/policies/terraform/state_policy_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Terraform::StatePolicy do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:terraform_state) { create(:terraform_state, project: project)}
+
+ subject { described_class.new(user, terraform_state) }
+
+ describe 'rules' do
+ context 'no access' do
+ let(:user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:read_terraform_state) }
+ it { is_expected.to be_disallowed(:admin_terraform_state) }
+ end
+
+ context 'developer' do
+ let(:user) { create(:user, developer_projects: [project]) }
+
+ it { is_expected.to be_allowed(:read_terraform_state) }
+ it { is_expected.to be_disallowed(:admin_terraform_state) }
+ end
+
+ context 'maintainer' do
+ let(:user) { create(:user, maintainer_projects: [project]) }
+
+ it { is_expected.to be_allowed(:read_terraform_state) }
+ it { is_expected.to be_allowed(:admin_terraform_state) }
+ end
+ end
+end
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index 18f79bc930c..5cb9d340e06 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -5,17 +5,20 @@ require 'spec_helper'
RSpec.describe Ci::PipelinePresenter do
include Gitlab::Routing
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :test_repo) }
+ let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
let(:current_user) { user }
- let(:project) { create(:project, :test_repo) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
subject(:presenter) do
described_class.new(pipeline)
end
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
allow(presenter).to receive(:current_user) { current_user }
end
@@ -184,8 +187,8 @@ RSpec.describe Ci::PipelinePresenter do
describe '#all_related_merge_request_text' do
subject { presenter.all_related_merge_request_text }
- let(:mr_1) { create(:merge_request) }
- let(:mr_2) { create(:merge_request) }
+ let_it_be(:mr_1) { create(:merge_request) }
+ let_it_be(:mr_2) { create(:merge_request) }
context 'with zero related merge requests (branch pipeline)' do
it { is_expected.to eq('No related merge requests found.') }
@@ -242,7 +245,7 @@ RSpec.describe Ci::PipelinePresenter do
end
context 'permissions' do
- let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
+ let_it_be_with_refind(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline, source_project: project) }
let(:pipeline) { merge_request.all_pipelines.take }
shared_examples 'private merge requests' do
diff --git a/spec/presenters/event_presenter_spec.rb b/spec/presenters/event_presenter_spec.rb
index 6798be21d28..5a67fd92c9d 100644
--- a/spec/presenters/event_presenter_spec.rb
+++ b/spec/presenters/event_presenter_spec.rb
@@ -38,4 +38,34 @@ RSpec.describe EventPresenter do
it { is_expected.to eq([project, target]) }
end
end
+
+ describe '#target_type_name' do
+ it 'returns design for a design event' do
+ expect(build(:design_event).present).to have_attributes(target_type_name: 'design')
+ end
+
+ it 'returns project for a project event' do
+ expect(build(:project_created_event).present).to have_attributes(target_type_name: 'project')
+ end
+
+ it 'returns milestone for a milestone event' do
+ expect(group_event.present).to have_attributes(target_type_name: 'milestone')
+ end
+ end
+
+ describe '#note_target_type_name' do
+ it 'returns design for an event on a comment on a design' do
+ expect(build(:event, :commented, :for_design).present)
+ .to have_attributes(note_target_type_name: 'design')
+ end
+
+ it 'returns nil for an event without a target' do
+ expect(build(:event).present).to have_attributes(note_target_type_name: be_nil)
+ end
+
+ it 'returns issue for an issue comment event' do
+ expect(build(:event, :commented, target: build(:note_on_issue)).present)
+ .to have_attributes(note_target_type_name: 'issue')
+ end
+ end
end
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index f1e581efd44..76b77ee0de2 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe MergeRequestPresenter do
- let(:resource) { create(:merge_request, source_project: project) }
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:resource) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user) }
describe '#ci_status' do
subject { described_class.new(resource).ci_status }
@@ -73,8 +73,6 @@ RSpec.describe MergeRequestPresenter do
end
describe '#conflict_resolution_path' do
- let(:project) { create :project }
- let(:user) { create :user }
let(:presenter) { described_class.new(resource, current_user: user) }
let(:path) { presenter.conflict_resolution_path }
@@ -107,18 +105,21 @@ RSpec.describe MergeRequestPresenter do
end
context 'issues links' do
- let(:project) { create(:project, :private, :repository, creator: user, namespace: user.namespace) }
- let(:issue_a) { create(:issue, project: project) }
- let(:issue_b) { create(:issue, project: project) }
+ let_it_be(:project) { create(:project, :private, :repository, creator: user, namespace: user.namespace) }
+ let_it_be(:issue_a) { create(:issue, project: project) }
+ let_it_be(:issue_b) { create(:issue, project: project) }
- let(:resource) do
+ let_it_be(:resource) do
create(:merge_request,
source_project: project, target_project: project,
description: "Fixes #{issue_a.to_reference} Related #{issue_b.to_reference}")
end
- before do
+ before_all do
project.add_developer(user)
+ end
+
+ before do
allow(resource.project).to receive(:default_branch)
.and_return(resource.target_branch)
resource.cache_merge_request_closes_issues!
diff --git a/spec/presenters/projects/prometheus/alert_presenter_spec.rb b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
deleted file mode 100644
index 98dba28829e..00000000000
--- a/spec/presenters/projects/prometheus/alert_presenter_spec.rb
+++ /dev/null
@@ -1,346 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Prometheus::AlertPresenter do
- include Gitlab::Routing.url_helpers
-
- let_it_be(:project, reload: true) { create(:project) }
-
- let(:presenter) { described_class.new(alert) }
- let(:payload) { {} }
- let(:alert) { create(:alerting_alert, project: project, payload: payload) }
-
- shared_context 'gitlab alert' do
- let(:gitlab_alert) { create(:prometheus_alert, project: project) }
- let(:metric_id) { gitlab_alert.prometheus_metric_id }
-
- let(:alert) do
- create(:alerting_alert, project: project, metric_id: metric_id, payload: payload)
- end
- end
-
- describe '#project_full_path' do
- subject { presenter.project_full_path }
-
- it { is_expected.to eq(project.full_path) }
- end
-
- describe '#start_time' do
- subject { presenter.start_time }
-
- let(:starts_at) { '2020-10-31T14:02:04Z' }
-
- before do
- payload['startsAt'] = starts_at
- end
-
- context 'with valid utc datetime' do
- it { is_expected.to eq('31 October 2020, 2:02PM (UTC)') }
-
- context 'with admin time zone not UTC' do
- before do
- allow(Time).to receive(:zone).and_return(ActiveSupport::TimeZone.new('Perth'))
- end
-
- it { is_expected.to eq('31 October 2020, 2:02PM (UTC)') }
- end
- end
-
- context 'with invalid datetime' do
- let(:starts_at) { 'invalid' }
-
- it { is_expected.to be_nil }
- end
- end
-
- describe '#issue_summary_markdown' do
- let(:markdown_line_break) { ' ' }
-
- subject { presenter.issue_summary_markdown }
-
- context 'without default payload' do
- it do
- is_expected.to eq(
- <<~MARKDOWN.chomp
- **Start time:** #{presenter.start_time}
-
- MARKDOWN
- )
- end
- end
-
- context 'with optional attributes' do
- before do
- payload['annotations'] = {
- 'title' => 'Alert Title',
- 'foo' => 'value1',
- 'bar' => 'value2',
- 'description' => 'Alert Description',
- 'monitoring_tool' => 'monitoring_tool_name',
- 'service' => 'service_name',
- 'hosts' => ['http://localhost:3000', 'http://localhost:3001']
- }
- payload['generatorURL'] = 'http://host?g0.expr=query'
- end
-
- it do
- is_expected.to eq(
- <<~MARKDOWN.chomp
- **Start time:** #{presenter.start_time}#{markdown_line_break}
- **full_query:** `query`#{markdown_line_break}
- **Service:** service_name#{markdown_line_break}
- **Monitoring tool:** monitoring_tool_name#{markdown_line_break}
- **Hosts:** http://localhost:3000 http://localhost:3001
-
- MARKDOWN
- )
- end
- end
-
- context 'when hosts is a string' do
- before do
- payload['annotations'] = { 'hosts' => 'http://localhost:3000' }
- end
-
- it do
- is_expected.to eq(
- <<~MARKDOWN.chomp
- **Start time:** #{presenter.start_time}#{markdown_line_break}
- **Hosts:** http://localhost:3000
-
- MARKDOWN
- )
- end
- end
-
- context 'with embedded metrics' do
- let(:starts_at) { '2018-03-12T09:06:00Z' }
-
- shared_examples_for 'markdown with metrics embed' do
- let(:embed_regex) { /\n\[\]\(#{Regexp.quote(presenter.metrics_dashboard_url)}\)\z/ }
-
- context 'without a starting time available' do
- around do |example|
- Timecop.freeze(starts_at) { example.run }
- end
-
- before do
- payload.delete('startsAt')
- end
-
- it { is_expected.to match(embed_regex) }
- end
-
- context 'with a starting time available' do
- it { is_expected.to match(embed_regex) }
- end
- end
-
- context 'for gitlab-managed prometheus alerts' do
- include_context 'gitlab-managed prometheus alert attributes'
-
- let(:alert) do
- create(:alerting_alert, project: project, metric_id: prometheus_metric_id, payload: payload)
- end
-
- it_behaves_like 'markdown with metrics embed'
- end
-
- context 'for alerts from a self-managed prometheus' do
- include_context 'self-managed prometheus alert attributes'
-
- it_behaves_like 'markdown with metrics embed'
-
- context 'without y_label' do
- let(:y_label) { title }
-
- before do
- payload['annotations'].delete('gitlab_y_label')
- end
-
- it_behaves_like 'markdown with metrics embed'
- end
-
- context 'when not enough information is present for an embed' do
- shared_examples_for 'does not include an embed' do
- it { is_expected.not_to match(/\[\]\(.+\)/) }
- end
-
- context 'without title' do
- before do
- payload['annotations'].delete('title')
- end
-
- it_behaves_like 'does not include an embed'
- end
-
- context 'without environment' do
- before do
- payload['labels'].delete('gitlab_environment_name')
- end
-
- it_behaves_like 'does not include an embed'
- end
-
- context 'without full_query' do
- before do
- payload.delete('generatorURL')
- end
-
- it_behaves_like 'does not include an embed'
- end
- end
- end
- end
- end
-
- describe '#show_performance_dashboard_link?' do
- subject { presenter.show_performance_dashboard_link? }
-
- it { is_expected.to be_falsey }
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- it { is_expected.to eq(true) }
- end
- end
-
- describe '#show_incident_issues_link?' do
- subject { presenter.show_incident_issues_link? }
-
- it { is_expected.to be_falsey }
-
- context 'create issue setting enabled' do
- before do
- create(:project_incident_management_setting, project: project, create_issue: true)
- end
-
- it { is_expected.to eq(true) }
- end
- end
-
- describe '#details_url' do
- subject { presenter.details_url }
-
- it { is_expected.to eq(nil) }
-
- context 'alert management alert present' do
- let_it_be(:am_alert) { create(:alert_management_alert, project: project) }
- let(:alert) { create(:alerting_alert, project: project, payload: payload, am_alert: am_alert) }
-
- it { is_expected.to eq("http://localhost/#{project.full_path}/-/alert_management/#{am_alert.iid}/details") }
- end
- end
-
- context 'with gitlab alert' do
- include_context 'gitlab alert'
-
- describe '#full_title' do
- let(:query_title) do
- "#{gitlab_alert.title} #{gitlab_alert.computed_operator} #{gitlab_alert.threshold} for 5 minutes"
- end
-
- let(:expected_subject) do
- "#{alert.environment.name}: #{query_title}"
- end
-
- subject { presenter.full_title }
-
- it { is_expected.to eq(expected_subject) }
- end
-
- describe '#metric_query' do
- subject { presenter.metric_query }
-
- it { is_expected.to eq(gitlab_alert.full_query) }
- end
-
- describe '#environment_name' do
- subject { presenter.environment_name }
-
- it { is_expected.to eq(alert.environment.name) }
- end
-
- describe '#performance_dashboard_link' do
- let(:expected_link) { metrics_project_environment_url(project, alert.environment) }
-
- subject { presenter.performance_dashboard_link }
-
- it { is_expected.to eq(expected_link) }
- end
-
- describe '#incident_issues_link' do
- let(:expected_link) { project_issues_url(project, label_name: described_class::INCIDENT_LABEL_NAME) }
-
- subject { presenter.incident_issues_link }
-
- it { is_expected.to eq(expected_link) }
- end
- end
-
- context 'without gitlab alert' do
- describe '#full_title' do
- subject { presenter.full_title }
-
- context 'with title' do
- let(:title) { 'some title' }
-
- before do
- expect(alert).to receive(:title).and_return(title)
- end
-
- it { is_expected.to eq(title) }
- end
-
- context 'without title' do
- it { is_expected.to eq('') }
- end
- end
-
- describe '#metric_query' do
- subject { presenter.metric_query }
-
- it { is_expected.to be_nil }
- end
-
- describe '#environment_name' do
- subject { presenter.environment_name }
-
- it { is_expected.to be_nil }
- end
-
- describe '#performance_dashboard_link' do
- let(:expected_link) { metrics_project_environments_url(project) }
-
- subject { presenter.performance_dashboard_link }
-
- it { is_expected.to eq(expected_link) }
- end
- end
-
- describe '#metrics_dashboard_url' do
- subject { presenter.metrics_dashboard_url }
-
- context 'for a non-prometheus alert' do
- it { is_expected.to be_nil }
- end
-
- context 'for a self-managed prometheus alert' do
- include_context 'self-managed prometheus alert attributes'
-
- let(:prometheus_payload) { payload }
-
- it { is_expected.to eq(dashboard_url_for_alert) }
- end
-
- context 'for a gitlab-managed prometheus alert' do
- include_context 'gitlab-managed prometheus alert attributes'
-
- let(:prometheus_payload) { payload }
-
- it { is_expected.to eq(dashboard_url_for_alert) }
- end
- end
-end
diff --git a/spec/presenters/sentry_error_presenter_spec.rb b/spec/presenters/sentry_error_presenter_spec.rb
index af9e7c8a2b2..86e43be1fa7 100644
--- a/spec/presenters/sentry_error_presenter_spec.rb
+++ b/spec/presenters/sentry_error_presenter_spec.rb
@@ -26,4 +26,12 @@ RSpec.describe SentryErrorPresenter do
expect(count).to eq error.frequency.first[1]
end
end
+
+ describe '#project_id' do
+ subject { presenter.project_id }
+
+ it 'returns a global ID of the correct type' do
+ expect(subject).to eq(Gitlab::GlobalId.build(model_name: 'SentryProject', id: error.project_id).to_s)
+ end
+ end
end
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 915f43fe572..d7268c79a2c 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -3,70 +3,75 @@
require 'spec_helper'
RSpec.describe SnippetBlobPresenter do
+ let_it_be(:snippet) { create(:personal_snippet, :repository) }
+
+ let(:branch) { snippet.default_branch }
+ let(:blob) { snippet.blobs.first }
+
describe '#rich_data' do
+ let(:data_endpoint_url) { "/-/snippets/#{snippet.id}/raw/#{branch}/#{file}" }
+
before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:current_user).and_return(nil)
end
+
+ blob.name = File.basename(file)
+ blob.path = file
end
- subject { described_class.new(snippet.blob).rich_data }
+ subject { described_class.new(blob).rich_data }
context 'with PersonalSnippet' do
- let(:snippet) { create(:personal_snippet, :repository) }
-
context 'when blob is binary' do
- it 'returns the HTML associated with the binary' do
- allow(snippet).to receive(:blob).and_return(snippet.repository.blob_at('master', 'files/images/logo-black.png'))
+ let(:file) { 'files/images/logo-black.png' }
+ let(:blob) { blob_at(file) }
+ it 'returns the HTML associated with the binary' do
expect(subject).to include('file-content image_file')
end
end
context 'with markdown format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test.md', content: '*foo*') }
+ let(:file) { 'README.md' }
+ let(:blob) { blob_at(file) }
it 'returns rich markdown content' do
- expected = <<~HTML
- <div class="file-content md">
- <p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>
- </div>
- HTML
-
- expect(subject).to eq(expected)
+ expect(subject).to include('file-content md')
end
end
context 'with notebook format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
+ let(:file) { 'test.ipynb' }
it 'returns rich notebook content' do
- expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/-/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
+ expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="#{data_endpoint_url}" id="js-notebook-viewer"></div>)
end
end
context 'with openapi format' do
- let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
+ let(:file) { 'openapi.yml' }
it 'returns rich openapi content' do
- expect(subject).to eq %Q(<div class="file-content" data-endpoint="/-/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
+ expect(subject).to eq %Q(<div class="file-content" data-endpoint="#{data_endpoint_url}" id="js-openapi-viewer"></div>\n)
end
end
context 'with svg format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test.svg') }
+ let(:file) { 'files/images/wm.svg' }
+ let(:blob) { blob_at(file) }
it 'returns rich svg content' do
result = Nokogiri::HTML::DocumentFragment.parse(subject)
image_tag = result.search('img').first
- expect(image_tag.attr('src')).to include("data:#{snippet.blob.mime_type};base64")
- expect(image_tag.attr('alt')).to eq('test.svg')
+ expect(image_tag.attr('src')).to include("data:#{blob.mime_type};base64")
+ expect(image_tag.attr('alt')).to eq(File.basename(file))
end
end
context 'with other format' do
- let(:snippet) { create(:personal_snippet, file_name: 'test') }
+ let(:file) { 'test' }
it 'does not return no rich content' do
expect(subject).to be_nil
@@ -76,36 +81,41 @@ RSpec.describe SnippetBlobPresenter do
end
describe '#plain_data' do
- let(:snippet) { build(:personal_snippet) }
+ let(:blob) { blob_at(file) }
- subject { described_class.new(snippet.blob).plain_data }
+ subject { described_class.new(blob).plain_data }
- it 'returns nil when the snippet blob is binary' do
- allow(snippet.blob).to receive(:binary?).and_return(true)
+ context 'when blob is binary' do
+ let(:file) { 'files/images/logo-black.png' }
- expect(subject).to be_nil
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
end
- it 'returns plain content when snippet file is markup' do
- snippet.file_name = 'test.md'
- snippet.content = '*foo*'
+ context 'when blob is markup' do
+ let(:file) { 'README.md' }
- expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
+ it 'returns plain content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="markdown">')
+ end
end
- it 'returns highlighted syntax content' do
- snippet.file_name = 'test.rb'
- snippet.content = 'class Foo;end'
+ context 'when blob has syntax' do
+ let(:file) { 'files/ruby/regex.rb' }
- expect(subject)
- .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
+ it 'returns highlighted syntax content' do
+ expect(subject)
+ .to include '<span id="LC1" class="line" lang="ruby"><span class="k">module</span> <span class="nn">Gitlab</span>'
+ end
end
- it 'returns plain text highlighted content' do
- snippet.file_name = 'test'
- snippet.content = 'foo'
+ context 'when blob has plain data' do
+ let(:file) { 'LICENSE' }
- expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
+ it 'returns plain text highlighted content' do
+ expect(subject).to include('<span id="LC1" class="line" lang="plaintext">The MIT License (MIT)</span>')
+ end
end
end
@@ -179,4 +189,8 @@ RSpec.describe SnippetBlobPresenter do
end
end
end
+
+ def blob_at(path)
+ snippet.repository.blob_at(branch, path)
+ end
end
diff --git a/spec/presenters/snippet_presenter_spec.rb b/spec/presenters/snippet_presenter_spec.rb
index 681564ed2b0..66c6ba8fa0e 100644
--- a/spec/presenters/snippet_presenter_spec.rb
+++ b/spec/presenters/snippet_presenter_spec.rb
@@ -163,25 +163,4 @@ RSpec.describe SnippetPresenter do
end
end
end
-
- describe '#blobs' do
- let(:snippet) { personal_snippet }
-
- subject { presenter.blobs }
-
- context 'when snippet does not have a repository' do
- it 'returns an array with one SnippetBlob' do
- expect(subject.size).to eq(1)
- expect(subject.first).to eq(snippet.blob)
- end
- end
-
- context 'when snippet has a repository' do
- let(:snippet) { create(:snippet, :repository, author: user) }
-
- it 'returns an array with all repository blobs' do
- expect(subject).to match_array(snippet.blobs)
- end
- end
- end
end
diff --git a/spec/requests/api/admin/instance_clusters_spec.rb b/spec/requests/api/admin/instance_clusters_spec.rb
index b68541b5d92..9d0661089a9 100644
--- a/spec/requests/api/admin/instance_clusters_spec.rb
+++ b/spec/requests/api/admin/instance_clusters_spec.rb
@@ -162,6 +162,7 @@ RSpec.describe ::API::Admin::InstanceClusters do
name: 'test-instance-cluster',
domain: 'domain.example.com',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: platform_kubernetes_attributes,
clusterable: clusterable
}
@@ -206,6 +207,7 @@ RSpec.describe ::API::Admin::InstanceClusters do
expect(cluster_result.enabled).to eq(true)
expect(platform_kubernetes.authorization_type).to eq('rbac')
expect(cluster_result.managed).to be_falsy
+ expect(cluster_result.namespace_per_environment).to eq(false)
expect(platform_kubernetes.api_url).to eq("https://example.com")
expect(platform_kubernetes.token).to eq('sample-token')
end
@@ -235,6 +237,22 @@ RSpec.describe ::API::Admin::InstanceClusters do
end
end
+ context 'when namespace_per_environment is not set' do
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'defaults to true' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result).to be_namespace_per_environment
+ end
+ end
+
context 'when an instance cluster already exists' do
it 'allows user to add multiple clusters' do
post api('/admin/clusters/add', admin_user), params: multiple_cluster_params
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 97110b63ff6..71be0c30f5a 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -227,10 +227,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'authorize uploading of an lsif artifact' do
- before do
- stub_feature_flags(code_navigation: job.project)
- end
-
it 'adds ProcessLsif header' do
authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
@@ -249,32 +245,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
.to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(tracking_params) }
.by(1)
end
-
- context 'code_navigation feature flag is disabled' do
- before do
- stub_feature_flags(code_navigation: false)
- end
-
- it 'responds with a forbidden error' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['ProcessLsif']).to be_falsy
- end
- end
-
- it 'does not track code_intelligence usage ping' do
- tracking_params = {
- event_names: 'i_source_code_code_intelligence',
- start_date: Date.yesterday,
- end_date: Date.today
- }
-
- expect { authorize_artifacts_with_token_in_headers(artifact_type: :lsif) }
- .not_to change { Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(tracking_params) }
- end
- end
end
def authorize_artifacts(params = {}, request_headers = headers)
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
index 183a3b26e00..92d38621105 100644
--- a/spec/requests/api/ci/runner/jobs_put_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -46,64 +46,59 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'when status is given' do
- it 'mark job as succeeded' do
+ it 'marks job as succeeded' do
update_job(state: 'success')
- job.reload
- expect(job).to be_success
+ expect(job.reload).to be_success
+ expect(response.header).not_to have_key('X-GitLab-Trace-Update-Interval')
end
- it 'mark job as failed' do
+ it 'marks job as failed' do
update_job(state: 'failed')
- job.reload
- expect(job).to be_failed
+ expect(job.reload).to be_failed
expect(job).to be_unknown_failure
+ expect(response.header).not_to have_key('X-GitLab-Trace-Update-Interval')
end
context 'when failure_reason is script_failure' do
before do
update_job(state: 'failed', failure_reason: 'script_failure')
- job.reload
end
- it { expect(job).to be_script_failure }
+ it { expect(job.reload).to be_script_failure }
end
context 'when failure_reason is runner_system_failure' do
before do
update_job(state: 'failed', failure_reason: 'runner_system_failure')
- job.reload
end
- it { expect(job).to be_runner_system_failure }
+ it { expect(job.reload).to be_runner_system_failure }
end
context 'when failure_reason is unrecognized value' do
before do
update_job(state: 'failed', failure_reason: 'what_is_this')
- job.reload
end
- it { expect(job).to be_unknown_failure }
+ it { expect(job.reload).to be_unknown_failure }
end
context 'when failure_reason is job_execution_timeout' do
before do
update_job(state: 'failed', failure_reason: 'job_execution_timeout')
- job.reload
end
- it { expect(job).to be_job_execution_timeout }
+ it { expect(job.reload).to be_job_execution_timeout }
end
context 'when failure_reason is unmet_prerequisites' do
before do
update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
- job.reload
end
- it { expect(job).to be_unmet_prerequisites }
+ it { expect(job.reload).to be_unmet_prerequisites }
end
context 'when unmigrated live trace chunks exist' do
@@ -119,24 +114,21 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(job.pending_state).to be_present
expect(response).to have_gitlab_http_status(:accepted)
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to be > 0
end
end
context 'when runner retries request after receiving 202' do
it 'responds with 202 and then with 200', :sidekiq_inline do
- perform_enqueued_jobs do
- update_job(state: 'success', checksum: 'crc32:12345678')
- end
+ update_job(state: 'success', checksum: 'crc32:12345678')
- expect(job.reload.pending_state).to be_present
expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.pending_state).to be_present
- perform_enqueued_jobs do
- update_job(state: 'success', checksum: 'crc32:12345678')
- end
+ update_job(state: 'success', checksum: 'crc32:12345678')
- expect(job.reload.pending_state).not_to be_present
expect(response).to have_gitlab_http_status(:ok)
+ expect(job.reload.pending_state).not_to be_present
end
end
@@ -149,8 +141,9 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
update_job(state: 'success', checksum: 'crc:12345678')
expect(job.reload).to be_success
- expect(job.pending_state).not_to be_present
+ expect(job.pending_state).to be_present
expect(response).to have_gitlab_http_status(:ok)
+ expect(response.header).not_to have_key('X-GitLab-Trace-Update-Interval')
end
end
end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index d34244771ad..98c1e0228d4 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -36,6 +36,13 @@ RSpec.describe API::Commits do
end
it 'includes correct pagination headers' do
+ get api(route, current_user)
+
+ expect(response).to include_limited_pagination_headers
+ end
+
+ it 'includes the total headers when the count is not disabled' do
+ stub_feature_flags(api_commits_without_count: false)
commit_count = project.repository.count_commits(ref: 'master').to_s
get api(route, current_user)
@@ -79,12 +86,10 @@ RSpec.describe API::Commits do
it 'includes correct pagination headers' do
commits = project.repository.commits("master", limit: 2)
after = commits.second.created_at
- commit_count = project.repository.count_commits(ref: 'master', after: after).to_s
get api("/projects/#{project_id}/repository/commits?since=#{after.utc.iso8601}", user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
expect(response.headers['X-Page']).to eql('1')
end
end
@@ -109,12 +114,10 @@ RSpec.describe API::Commits do
it 'includes correct pagination headers' do
commits = project.repository.commits("master", limit: 2)
before = commits.second.created_at
- commit_count = project.repository.count_commits(ref: 'master', before: before).to_s
get api("/projects/#{project_id}/repository/commits?until=#{before.utc.iso8601}", user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
expect(response.headers['X-Page']).to eql('1')
end
end
@@ -137,49 +140,49 @@ RSpec.describe API::Commits do
context "path optional parameter" do
it "returns project commits matching provided path parameter" do
path = 'files/ruby/popen.rb'
- commit_count = project.repository.count_commits(ref: 'master', path: path).to_s
get api("/projects/#{project_id}/repository/commits?path=#{path}", user)
expect(json_response.size).to eq(3)
expect(json_response.first["id"]).to eq("570e7b2abdd848b95f2f578043fc23bd6f6fd24d")
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
end
it 'includes correct pagination headers' do
path = 'files/ruby/popen.rb'
- commit_count = project.repository.count_commits(ref: 'master', path: path).to_s
get api("/projects/#{project_id}/repository/commits?path=#{path}", user)
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
+ expect(response).to include_limited_pagination_headers
expect(response.headers['X-Page']).to eql('1')
end
end
context 'all optional parameter' do
it 'returns all project commits' do
- commit_count = project.repository.count_commits(all: true)
+ expected_commit_ids = project.repository.commits(nil, all: true, limit: 50).map(&:id)
+
+ get api("/projects/#{project_id}/repository/commits?all=true&per_page=50", user)
- get api("/projects/#{project_id}/repository/commits?all=true", user)
+ commit_ids = json_response.map { |c| c['id'] }
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count.to_s)
+ expect(response).to include_limited_pagination_headers
+ expect(commit_ids).to eq(expected_commit_ids)
expect(response.headers['X-Page']).to eql('1')
end
end
context 'first_parent optional parameter' do
it 'returns all first_parent commits' do
- commit_count = project.repository.count_commits(ref: SeedRepo::Commit::ID, first_parent: true)
+ expected_commit_ids = project.repository.commits(SeedRepo::Commit::ID, limit: 50, first_parent: true).map(&:id)
- get api("/projects/#{project_id}/repository/commits", user), params: { ref_name: SeedRepo::Commit::ID, first_parent: 'true' }
+ get api("/projects/#{project_id}/repository/commits?per_page=50", user), params: { ref_name: SeedRepo::Commit::ID, first_parent: 'true' }
- expect(response).to include_pagination_headers
- expect(commit_count).to eq(12)
- expect(response.headers['X-Total']).to eq(commit_count.to_s)
+ commit_ids = json_response.map { |c| c['id'] }
+
+ expect(response).to include_limited_pagination_headers
+ expect(expected_commit_ids.size).to eq(12)
+ expect(commit_ids).to eq(expected_commit_ids)
end
end
@@ -209,11 +212,7 @@ RSpec.describe API::Commits do
end
it 'returns correct headers' do
- commit_count = project.repository.count_commits(ref: ref_name).to_s
-
- expect(response).to include_pagination_headers
- expect(response.headers['X-Total']).to eq(commit_count)
- expect(response.headers['X-Page']).to eq('1')
+ expect(response).to include_limited_pagination_headers
expect(response.headers['Link']).to match(/page=1&per_page=5/)
expect(response.headers['Link']).to match(/page=2&per_page=5/)
end
@@ -972,7 +971,7 @@ RSpec.describe API::Commits do
refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response).to be_an Array
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
end
@@ -1262,7 +1261,7 @@ RSpec.describe API::Commits do
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response.size).to be >= 1
expect(json_response.first.keys).to include 'diff'
end
@@ -1276,7 +1275,7 @@ RSpec.describe API::Commits do
get api(route, current_user)
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response.size).to be <= 1
end
end
@@ -1914,7 +1913,7 @@ RSpec.describe API::Commits do
get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
+ expect(response).to include_limited_pagination_headers
expect(json_response.length).to eq(1)
expect(json_response[0]['id']).to eq(merged_mr.id)
end
diff --git a/spec/requests/api/debian_group_packages_spec.rb b/spec/requests/api/debian_group_packages_spec.rb
new file mode 100644
index 00000000000..8a05d20fb33
--- /dev/null
+++ b/spec/requests/api/debian_group_packages_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::DebianGroupPackages do
+ include HttpBasicAuthHelpers
+ include WorkhorseHelpers
+
+ include_context 'Debian repository shared context', :group do
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/Release" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Release'
+ end
+
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/InRelease" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET groups/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO Packages'
+ end
+
+ describe 'GET groups/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/groups/#{group.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+
+ it_behaves_like 'Debian group repository GET endpoint', :success, 'TODO File'
+ end
+ end
+end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
new file mode 100644
index 00000000000..d2f208d0079
--- /dev/null
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe API::DebianProjectPackages do
+ include HttpBasicAuthHelpers
+ include WorkhorseHelpers
+
+ include_context 'Debian repository shared context', :project do
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/Release.gpg' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/Release.gpg" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/Release' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/Release" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Release'
+ end
+
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/InRelease' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/InRelease" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :not_found, nil
+ end
+
+ describe 'GET projects/:id/-/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/dists/#{distribution}/#{component}/binary-#{architecture}/Packages" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO Packages'
+ end
+
+ describe 'GET projects/:id/-/packages/debian/pool/:component/:letter/:source_package/:file_name' do
+ let(:url) { "/projects/#{project.id}/-/packages/debian/pool/#{component}/#{letter}/#{source_package}/#{package_name}_#{package_version}_#{architecture}.deb" }
+
+ it_behaves_like 'Debian project repository GET endpoint', :success, 'TODO File'
+ end
+
+ describe 'PUT projects/:id/-/packages/debian/incoming/:file_name' do
+ let(:method) { :put }
+ let(:url) { "/projects/#{project.id}/-/packages/debian/incoming/#{file_name}" }
+
+ it_behaves_like 'Debian project repository PUT endpoint', :created, nil
+ end
+ end
+end
diff --git a/spec/requests/api/features_spec.rb b/spec/requests/api/features_spec.rb
index 2746e777306..3f443b4f92b 100644
--- a/spec/requests/api/features_spec.rb
+++ b/spec/requests/api/features_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe API::Features, stub_feature_flags: false do
Flipper.register(:perf_team) do |actor|
actor.respond_to?(:admin) && actor.admin?
end
+
+ skip_feature_flags_yaml_validation
end
describe 'GET /features' do
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index bb4e88f97f8..f77f127ddc8 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -747,7 +747,7 @@ RSpec.describe API::Files do
it "updates existing file in project repo with accepts correct last commit id" do
last_commit = Gitlab::Git::Commit
- .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ .last_for_path(project.repository, 'master', Addressable::URI.unencode_component(file_path))
params_with_correct_id = params.merge(last_commit_id: last_commit.id)
put api(route(file_path), user), params: params_with_correct_id
@@ -757,7 +757,7 @@ RSpec.describe API::Files do
it "returns 400 when file path is invalid" do
last_commit = Gitlab::Git::Commit
- .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ .last_for_path(project.repository, 'master', Addressable::URI.unencode_component(file_path))
params_with_correct_id = params.merge(last_commit_id: last_commit.id)
put api(route(rouge_file_path), user), params: params_with_correct_id
@@ -769,7 +769,7 @@ RSpec.describe API::Files do
it_behaves_like 'when path is absolute' do
let(:last_commit) do
Gitlab::Git::Commit
- .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ .last_for_path(project.repository, 'master', Addressable::URI.unencode_component(file_path))
end
let(:params_with_correct_id) { params.merge(last_commit_id: last_commit.id) }
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index ed852fe75c7..2cb686167f1 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -4,79 +4,432 @@ require 'spec_helper'
RSpec.describe API::GenericPackages do
let_it_be(:personal_access_token) { create(:personal_access_token) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let(:workhorse_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:workhorse_header) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token } }
+ let(:user) { personal_access_token.user }
+ let(:ci_build) { create(:ci_build, :running, user: user) }
- describe 'GET /api/v4/projects/:id/packages/generic/ping' do
- let(:user) { personal_access_token.user }
- let(:auth_token) { personal_access_token.token }
+ def auth_header
+ return {} if user_role == :anonymous
+ case authenticate_with
+ when :personal_access_token
+ personal_access_token_header
+ when :job_token
+ job_token_header
+ when :invalid_personal_access_token
+ personal_access_token_header('wrong token')
+ when :invalid_job_token
+ job_token_header('wrong token')
+ end
+ end
+
+ def personal_access_token_header(value = nil)
+ { Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => value || personal_access_token.token }
+ end
+
+ def job_token_header(value = nil)
+ { Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER => value || ci_build.token }
+ end
+
+ shared_examples 'secure endpoint' do
before do
project.add_developer(user)
end
- context 'packages feature is disabled' do
- it 'responds with 404 Not Found' do
- stub_packages_setting(enabled: false)
+ it 'rejects malicious request' do
+ subject
- ping(personal_access_token: auth_token)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ describe 'PUT /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name/authorize' do
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
+ 'PUBLIC' | :developer | true | :personal_access_token | :success
+ 'PUBLIC' | :guest | true | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :guest | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :anonymous | false | :none | :unauthorized
+ 'PRIVATE' | :developer | true | :personal_access_token | :success
+ 'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :personal_access_token | :not_found
+ 'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :anonymous | false | :none | :unauthorized
+ 'PUBLIC' | :developer | true | :job_token | :success
+ 'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
+ 'PUBLIC' | :developer | false | :job_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | true | :job_token | :success
+ 'PRIVATE' | :developer | true | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | false | :job_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_job_token | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility, false))
+ project.send("add_#{user_role}", user) if member? && user_role != :anonymous
+ end
+
+ it "responds with #{params[:expected_status]}" do
+ authorize_upload_file(workhorse_header.merge(auth_header))
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+ end
+
+ context 'application security' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ subject { authorize_upload_file(workhorse_header.merge(personal_access_token_header), param_name => param_value) }
+
+ it_behaves_like 'secure endpoint'
end
end
context 'generic_packages feature flag is disabled' do
it 'responds with 404 Not Found' do
stub_feature_flags(generic_packages: false)
+ project.add_developer(user)
- ping(personal_access_token: auth_token)
+ authorize_upload_file(workhorse_header.merge(personal_access_token_header))
expect(response).to have_gitlab_http_status(:not_found)
end
end
- context 'generic_packages feature flag is enabled' do
+ def authorize_upload_file(request_headers, package_name: 'mypackage', file_name: 'myfile.tar.gz')
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/0.0.1/#{file_name}/authorize"
+
+ put api(url), headers: request_headers
+ end
+ end
+
+ describe 'PUT /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name' do
+ include WorkhorseHelpers
+
+ let(:file_upload) { fixture_file_upload('spec/fixtures/packages/generic/myfile.tar.gz') }
+ let(:params) { { file: file_upload } }
+
+ context 'authentication' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
+ 'PUBLIC' | :guest | true | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :guest | false | :personal_access_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :anonymous | false | :none | :unauthorized
+ 'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :personal_access_token | :not_found
+ 'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :anonymous | false | :none | :unauthorized
+ 'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
+ 'PUBLIC' | :developer | false | :job_token | :forbidden
+ 'PUBLIC' | :developer | false | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | true | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | false | :job_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_job_token | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility, false))
+ project.send("add_#{user_role}", user) if member? && user_role != :anonymous
+ end
+
+ it "responds with #{params[:expected_status]}" do
+ headers = workhorse_header.merge(auth_header)
+
+ upload_file(params, headers)
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+ end
+
+ context 'when user can upload packages and has valid credentials' do
before do
- stub_feature_flags(generic_packages: true)
+ project.add_developer(user)
end
- context 'authenticating using personal access token' do
- it 'responds with 200 OK when valid personal access token is provided' do
- ping(personal_access_token: auth_token)
+ it 'creates package and package file when valid personal access token is used' do
+ headers = workhorse_header.merge(personal_access_token_header)
+
+ expect { upload_file(params, headers) }
+ .to change { project.packages.generic.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
- expect(response).to have_gitlab_http_status(:ok)
+ package = project.packages.generic.last
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info).to be_nil
+
+ package_file = package.package_files.last
+ expect(package_file.file_name).to eq('myfile.tar.gz')
end
+ end
+
+ it 'creates package, package file, and package build info when valid job token is used' do
+ headers = workhorse_header.merge(job_token_header)
+
+ expect { upload_file(params, headers) }
+ .to change { project.packages.generic.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
- it 'responds with 401 Unauthorized when invalid personal access token provided' do
- ping(personal_access_token: 'invalid-token')
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:created)
- expect(response).to have_gitlab_http_status(:unauthorized)
+ package = project.packages.generic.last
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info.pipeline).to eq(ci_build.pipeline)
+
+ package_file = package.package_files.last
+ expect(package_file.file_name).to eq('myfile.tar.gz')
end
end
- context 'authenticating using job token' do
- it 'responds with 200 OK when valid job token is provided' do
- job_token = create(:ci_build, :running, user: user).token
+ context 'event tracking' do
+ subject { upload_file(params, workhorse_header.merge(personal_access_token_header)) }
+
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ end
+
+ it 'rejects request without a file from workhorse' do
+ headers = workhorse_header.merge(personal_access_token_header)
+ upload_file({}, headers)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects request without an auth token' do
+ upload_file(params, workhorse_header)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ it 'rejects request without workhorse rewritten fields' do
+ headers = workhorse_header.merge(personal_access_token_header)
+ upload_file(params, headers, send_rewritten_field: false)
- ping(job_token: job_token)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- expect(response).to have_gitlab_http_status(:ok)
+ it 'rejects request if file size is too large' do
+ allow_next_instance_of(UploadedFile) do |uploaded_file|
+ allow(uploaded_file).to receive(:size).and_return(project.actual_limits.generic_packages_max_file_size + 1)
end
- it 'responds with 401 Unauthorized when invalid job token provided' do
- ping(job_token: 'invalid-token')
+ headers = workhorse_header.merge(personal_access_token_header)
+ upload_file(params, headers)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects request without workhorse header' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
- expect(response).to have_gitlab_http_status(:unauthorized)
+ upload_file(params, personal_access_token_header)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'application security' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ subject { upload_file(params, workhorse_header.merge(personal_access_token_header), param_name => param_value) }
+
+ it_behaves_like 'secure endpoint'
+ end
+ end
+
+ def upload_file(params, request_headers, send_rewritten_field: true, package_name: 'mypackage', file_name: 'myfile.tar.gz')
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/0.0.1/#{file_name}"
+
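+ # workhorse_finalize (from WorkhorseHelpers, included above) simulates a Workhorse-accelerated upload, optionally omitting the rewritten file fields.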
+ workhorse_finalize(
+ api(url),
+ method: :put,
+ file_key: :file,
+ params: params,
+ headers: request_headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+ end
+
+ describe 'GET /api/v4/projects/:id/packages/generic/:package_name/:package_version/:file_name' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:package) { create(:generic_package, project: project) }
+ let_it_be(:package_file) { create(:package_file, :generic, package: package) }
+
+ context 'authentication' do
+ where(:project_visibility, :user_role, :member?, :authenticate_with, :expected_status) do
+ 'PUBLIC' | :developer | true | :personal_access_token | :success
+ 'PUBLIC' | :guest | true | :personal_access_token | :success
+ 'PUBLIC' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :developer | false | :personal_access_token | :success
+ 'PUBLIC' | :guest | false | :personal_access_token | :success
+ 'PUBLIC' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PUBLIC' | :anonymous | false | :none | :unauthorized
+ 'PRIVATE' | :developer | true | :personal_access_token | :success
+ 'PRIVATE' | :guest | true | :personal_access_token | :forbidden
+ 'PRIVATE' | :developer | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | true | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :developer | false | :personal_access_token | :not_found
+ 'PRIVATE' | :guest | false | :personal_access_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :guest | false | :invalid_personal_access_token | :unauthorized
+ 'PRIVATE' | :anonymous | false | :none | :unauthorized
+ 'PUBLIC' | :developer | true | :job_token | :success
+ 'PUBLIC' | :developer | true | :invalid_job_token | :unauthorized
+ 'PUBLIC' | :developer | false | :job_token | :success
+ 'PUBLIC' | :developer | false | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | true | :job_token | :success
+ 'PRIVATE' | :developer | true | :invalid_job_token | :unauthorized
+ 'PRIVATE' | :developer | false | :job_token | :not_found
+ 'PRIVATE' | :developer | false | :invalid_job_token | :unauthorized
+ end
+
+ with_them do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility, false))
+ project.send("add_#{user_role}", user) if member? && user_role != :anonymous
+ end
+
+ it "responds with #{params[:expected_status]}" do
+ download_file(auth_header)
+
+ expect(response).to have_gitlab_http_status(expected_status)
end
end
end
- def ping(personal_access_token: nil, job_token: nil)
- headers = {
- Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => personal_access_token.presence,
- Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER => job_token.presence
- }.compact
+ context 'event tracking' do
+ before do
+ project.add_developer(user)
+ end
+
+ subject { download_file(personal_access_token_header) }
+
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ end
+
+ it 'rejects a malicious file name request' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, file_name: '../.ssh%2fauthorized_keys')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects a URL-encoded malicious file name request' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, file_name: '%2e%2e%2f.ssh%2fauthorized_keys')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects a malicious package name request' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, package_name: 'my-package/../')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'rejects a URL-encoded malicious package name request' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, package_name: 'my-package%2f%2e%2e%2f')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ context 'application security' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:param_name, :param_value) do
+ :package_name | 'my-package/../'
+ :package_name | 'my-package%2f%2e%2e%2f'
+ :file_name | '../.ssh%2fauthorized_keys'
+ :file_name | '%2e%2e%2f.ssh%2fauthorized_keys'
+ end
+
+ with_them do
+ subject { download_file(personal_access_token_header, param_name => param_value) }
+
+ it_behaves_like 'secure endpoint'
+ end
+ end
+
+ it 'responds with 404 Not Found for a non-existent package' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, package_name: 'no-such-package')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'responds with 404 Not Found for a non-existent package file' do
+ project.add_developer(user)
+
+ download_file(personal_access_token_header, file_name: 'no-such-file')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ def download_file(request_headers, package_name: nil, file_name: nil)
+ package_name ||= package.name
+ file_name ||= package_file.file_name
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/#{package.version}/#{file_name}"
- get api('/projects/%d/packages/generic/ping' % project.id), headers: headers
+ get api(url), headers: request_headers
end
end
end
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index ee7dba545be..fe1c7c15de2 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -190,7 +190,9 @@ RSpec.describe 'GitlabSchema configurations' do
variables: {}.to_s,
complexity: 181,
depth: 13,
- duration_s: 7
+ duration_s: 7,
+ used_fields: an_instance_of(Array),
+ used_deprecated_fields: an_instance_of(Array)
}
expect_any_instance_of(Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer).to receive(:duration).and_return(7)
diff --git a/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb b/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
new file mode 100644
index 00000000000..42f690f53ed
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/lists/destroy_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Boards::Lists::Destroy do
+ include GraphqlHelpers
+
+ let_it_be(:current_user, reload: true) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board) }
+ let(:mutation) do
+ variables = {
+ list_id: GitlabSchema.id_from_object(list).to_s
+ }
+
+ graphql_mutation(:destroy_board_list, variables)
+ end
+
+ subject { post_graphql_mutation(mutation, current_user: current_user) }
+
+ def mutation_response
+ graphql_mutation_response(:destroy_board_list)
+ end
+
+ context 'when the user does not have permission' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not destroy the list' do
+ expect { subject }.not_to change { List.count }
+ end
+ end
+
+ context 'when the user has permission' do
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ context 'when given id is not for a list' do
+ let_it_be(:list) { build_stubbed(:issue, project: project) }
+
+ it 'returns an error' do
+ subject
+
+ expect(graphql_errors.first['message']).to include('does not represent an instance of List')
+ end
+ end
+
+ context 'when everything is ok' do
+ it 'destroys the list' do
+ expect { subject }.to change { List.count }.from(2).to(1)
+ end
+
+ it 'returns an empty list' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response).to have_key('list')
+ expect(mutation_response['list']).to be_nil
+ end
+ end
+
+ context 'when the list is not destroyable' do
+ let_it_be(:list) { create(:list, board: board, list_type: :backlog) }
+
+ it 'does not destroy the list' do
+ expect { subject }.not_to change { List.count }.from(3)
+ end
+
+ it 'returns an error and not nil list' do
+ subject
+
+ expect(mutation_response['errors']).not_to be_empty
+ expect(mutation_response['list']).not_to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 1bb446de708..a708c3fdf1f 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -79,18 +79,20 @@ RSpec.describe 'Creating a Snippet' do
end
shared_examples 'creates snippet' do
- it 'returns the created Snippet' do
+ it 'returns the created Snippet', :aggregate_failures do
expect do
subject
end.to change { Snippet.count }.by(1)
+ snippet = Snippet.last
+ created_file_1 = snippet.repository.blob_at('HEAD', file_1[:filePath])
+ created_file_2 = snippet.repository.blob_at('HEAD', file_2[:filePath])
+
+ expect(created_file_1.data).to match(file_1[:content])
+ expect(created_file_2.data).to match(file_2[:content])
expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description)
expect(mutation_response['snippet']['visibilityLevel']).to eq(visibility_level)
- expect(mutation_response['snippet']['blobs'][0]['plainData']).to match(file_1[:content])
- expect(mutation_response['snippet']['blobs'][0]['fileName']).to match(file_1[:file_path])
- expect(mutation_response['snippet']['blobs'][1]['plainData']).to match(file_2[:content])
- expect(mutation_response['snippet']['blobs'][1]['fileName']).to match(file_2[:file_path])
end
context 'when action is invalid' do
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 58ce74b9263..67a9869c001 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -73,7 +73,6 @@ RSpec.describe 'Updating a Snippet' do
aggregate_failures do
expect(blob_to_update.data).to eq updated_content
expect(blob_to_delete).to be_nil
- expect(blob_in_mutation_response(updated_file)['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description)
expect(mutation_response['snippet']['visibilityLevel']).to eq('public')
@@ -100,7 +99,6 @@ RSpec.describe 'Updating a Snippet' do
aggregate_failures do
expect(blob_at(updated_file).data).to eq blob_to_update.data
expect(blob_at(deleted_file).data).to eq blob_to_delete.data
- expect(blob_in_mutation_response(deleted_file)['plainData']).not_to be_nil
expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description)
expect(mutation_response['snippet']['visibilityLevel']).to eq('private')
@@ -108,10 +106,6 @@ RSpec.describe 'Updating a Snippet' do
end
end
- def blob_in_mutation_response(filename)
- mutation_response['snippet']['blobs'].select { |blob| blob['name'] == filename }[0]
- end
-
def blob_at(filename)
snippet.repository.blob_at('HEAD', filename)
end
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index 5d4276f47ca..40fec6ba068 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -53,16 +53,37 @@ RSpec.describe 'getting an issue list for a project' do
context 'when limiting the number of results' do
let(:query) do
- graphql_query_for(
- 'project',
- { 'fullPath' => project.full_path },
- "issues(first: 1) { #{fields} }"
- )
+ <<~GQL
+ query($path: ID!, $n: Int) {
+ project(fullPath: $path) {
+ issues(first: $n) { #{fields} }
+ }
+ }
+ GQL
+ end
+
+ let(:issue_limit) { 1 }
+ let(:variables) do
+ { path: project.full_path, n: issue_limit }
end
it_behaves_like 'a working graphql query' do
before do
- post_graphql(query, current_user: current_user)
+ post_graphql(query, current_user: current_user, variables: variables)
+ end
+
+ it 'only returns N issues' do
+ expect(issues_data.size).to eq(issue_limit)
+ end
+ end
+
+ context 'when no limit is provided' do
+ let(:issue_limit) { nil }
+
+ it 'returns all issues' do
+ post_graphql(query, current_user: current_user, variables: variables)
+
+ expect(issues_data.size).to be > 1
end
end
@@ -71,7 +92,7 @@ RSpec.describe 'getting an issue list for a project' do
# Newest first, we only want to see the newest checked
expect(Ability).not_to receive(:allowed?).with(current_user, :read_issue, issues.first)
- post_graphql(query, current_user: current_user)
+ post_graphql(query, current_user: current_user, variables: variables)
end
end
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index ff1a5aa1540..94a66f54e4d 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -9,7 +9,15 @@ RSpec.describe 'GraphQL' do
context 'logging' do
shared_examples 'logging a graphql query' do
let(:expected_params) do
- { query_string: query, variables: variables.to_s, duration_s: anything, depth: 1, complexity: 1 }
+ {
+ query_string: query,
+ variables: variables.to_s,
+ duration_s: anything,
+ depth: 1,
+ complexity: 1,
+ used_fields: ['Query.echo'],
+ used_deprecated_fields: []
+ }
end
it 'logs a query with the expected params' do
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index 068af1485e2..eb21ae9468c 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -172,6 +172,7 @@ RSpec.describe API::GroupClusters do
name: 'test-cluster',
domain: 'domain.example.com',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: platform_kubernetes_attributes,
management_project_id: management_project_id
}
@@ -206,6 +207,7 @@ RSpec.describe API::GroupClusters do
expect(cluster_result.domain).to eq('domain.example.com')
expect(cluster_result.managed).to be_falsy
expect(cluster_result.management_project_id).to eq management_project_id
+ expect(cluster_result.namespace_per_environment).to eq(false)
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.token).to eq('sample-token')
@@ -237,6 +239,22 @@ RSpec.describe API::GroupClusters do
end
end
+ context 'when namespace_per_environment is not set' do
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'defaults to true' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result).to be_namespace_per_environment
+ end
+ end
+
context 'current user does not have access to management_project_id' do
let(:management_project_id) { create(:project).id }
diff --git a/spec/requests/api/group_container_repositories_spec.rb b/spec/requests/api/group_container_repositories_spec.rb
index 3128becae6d..4b97fad79dd 100644
--- a/spec/requests/api/group_container_repositories_spec.rb
+++ b/spec/requests/api/group_container_repositories_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe API::GroupContainerRepositories do
let(:object) { group }
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
context 'with invalid group id' do
let(:url) { "/groups/#{non_existing_record_id}/registry/repositories" }
diff --git a/spec/requests/api/group_packages_spec.rb b/spec/requests/api/group_packages_spec.rb
index f67cafbd8f5..72ba25c59af 100644
--- a/spec/requests/api/group_packages_spec.rb
+++ b/spec/requests/api/group_packages_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'returns packages', :group, :reporter
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'with subgroup' do
@@ -88,7 +88,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returns packages with subgroups', :group, :owner
it_behaves_like 'returns packages with subgroups', :group, :maintainer
it_behaves_like 'returns packages with subgroups', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'returns packages with subgroups', :group, :reporter
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
context 'excluding subgroup' do
@@ -97,7 +97,7 @@ RSpec.describe API::GroupPackages do
it_behaves_like 'returns packages', :group, :owner
it_behaves_like 'returns packages', :group, :maintainer
it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'returns packages', :group, :reporter
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
end
end
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index da423e986c3..c7756a4fae5 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -1391,6 +1391,139 @@ RSpec.describe API::Groups do
end
end
+ describe 'GET /groups/:id/descendant_groups' do
+ let_it_be(:child_group1) { create(:group, parent: group1) }
+ let_it_be(:private_child_group1) { create(:group, :private, parent: group1) }
+ let_it_be(:sub_child_group1) { create(:group, parent: child_group1) }
+ let_it_be(:child_group2) { create(:group, :private, parent: group2) }
+ let_it_be(:sub_child_group2) { create(:group, :private, parent: child_group2) }
+ let(:response_groups) { json_response.map { |group| group['name'] } }
+
+ context 'when unauthenticated' do
+ it 'returns only public descendants' do
+ get api("/groups/#{group1.id}/descendant_groups")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ expect(response_groups).to contain_exactly(child_group1.name, sub_child_group1.name)
+ end
+
+ it 'returns 404 for a private group' do
+ get api("/groups/#{group2.id}/descendant_groups")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when authenticated as user' do
+ context 'when user is not member of a public group' do
+ it 'returns no descendants for the public group' do
+ get api("/groups/#{group1.id}/descendant_groups", user2)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(0)
+ end
+
+ context 'when using all_available in request' do
+ it 'returns public descendants' do
+ get api("/groups/#{group1.id}/descendant_groups", user2), params: { all_available: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ expect(response_groups).to contain_exactly(child_group1.name, sub_child_group1.name)
+ end
+ end
+ end
+
+ context 'when user is not member of a private group' do
+ it 'returns 404 for the private group' do
+ get api("/groups/#{group2.id}/descendant_groups", user1)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is member of public group' do
+ before do
+ group1.add_guest(user2)
+ end
+
+ it 'returns private descendants' do
+ get api("/groups/#{group1.id}/descendant_groups", user2)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(3)
+ expect(response_groups).to contain_exactly(child_group1.name, sub_child_group1.name, private_child_group1.name)
+ end
+
+ context 'when using statistics in request' do
+ it 'does not include statistics' do
+ get api("/groups/#{group1.id}/descendant_groups", user2), params: { statistics: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first).not_to include 'statistics'
+ end
+ end
+ end
+
+ context 'when user is member of private group' do
+ before do
+ group2.add_guest(user1)
+ end
+
+ it 'returns descendants' do
+ get api("/groups/#{group2.id}/descendant_groups", user1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ expect(response_groups).to contain_exactly(child_group2.name, sub_child_group2.name)
+ end
+ end
+ end
+
+ context 'when authenticated as admin' do
+ it 'returns private descendants of a public group' do
+ get api("/groups/#{group1.id}/descendant_groups", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(3)
+ end
+
+ it 'returns descendants of a private group' do
+ get api("/groups/#{group2.id}/descendant_groups", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.length).to eq(2)
+ end
+
+ it 'does not include statistics by default' do
+ get api("/groups/#{group1.id}/descendant_groups", admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first).not_to include('statistics')
+ end
+
+ it 'includes statistics if requested' do
+ get api("/groups/#{group1.id}/descendant_groups", admin), params: { statistics: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(json_response.first).to include('statistics')
+ end
+ end
+ end
+
describe "POST /groups" do
it_behaves_like 'group avatar upload' do
def make_upload_request
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 9c0ea14e3e3..91d10791541 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe API::Helpers do
include described_class
include TermsHelper
- let(:user) { create(:user) }
+ let_it_be(:user, reload: true) { create(:user) }
let(:admin) { create(:admin) }
let(:key) { create(:key, user: user) }
@@ -243,6 +243,67 @@ RSpec.describe API::Helpers do
end
end
end
+
+ describe "when authenticating using a job token" do
+ let_it_be(:job, reload: true) do
+ create(:ci_build, user: user, status: :running)
+ end
+
+ let(:route_authentication_setting) { {} }
+
+ before do
+ allow_any_instance_of(self.class).to receive(:route_authentication_setting)
+ .and_return(route_authentication_setting)
+ end
+
+ context 'when route is allowed to be authenticated' do
+ let(:route_authentication_setting) { { job_token_allowed: true } }
+
+ it "returns a 401 response for an invalid token" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = 'invalid token'
+
+ expect { current_user }.to raise_error /401/
+ end
+
+ it "returns a 401 response for a job that's not running" do
+ job.update!(status: :success)
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+
+ expect { current_user }.to raise_error /401/
+ end
+
+ it "returns a 403 response for a user without access" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+ allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(false)
+
+ expect { current_user }.to raise_error /403/
+ end
+
+ it 'returns a 403 response for a user who is blocked' do
+ user.block!
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+
+ expect { current_user }.to raise_error /403/
+ end
+
+ it "sets current_user" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+
+ expect(current_user).to eq(user)
+ end
+ end
+
+ context 'when route is not allowed to be authenticated' do
+ let(:route_authentication_setting) { { job_token_allowed: false } }
+
+ it "sets current_user to nil" do
+ env[Gitlab::Auth::AuthFinders::JOB_TOKEN_HEADER] = job.token
+ allow_any_instance_of(Gitlab::UserAccess).to receive(:allowed?).and_return(true)
+
+ expect(current_user).to be_nil
+ end
+ end
+ end
end
describe '.handle_api_exception' do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 4a0a7c81781..e13b492ecc8 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe API::Internal::Base do
+ include APIInternalBaseHelpers
+
let_it_be(:user, reload: true) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository, :wiki_repo) }
let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
@@ -1207,88 +1209,6 @@ RSpec.describe API::Internal::Base do
end
end
- def gl_repository_for(container)
- case container
- when ProjectWiki
- Gitlab::GlRepository::WIKI.identifier_for_container(container.project)
- when Project
- Gitlab::GlRepository::PROJECT.identifier_for_container(container)
- when Snippet
- Gitlab::GlRepository::SNIPPET.identifier_for_container(container)
- else
- nil
- end
- end
-
- def full_path_for(container)
- case container
- when PersonalSnippet
- "snippets/#{container.id}"
- when ProjectSnippet
- "#{container.project.full_path}/snippets/#{container.id}"
- else
- container.full_path
- end
- end
-
- def pull(key, container, protocol = 'ssh')
- post(
- api("/internal/allowed"),
- params: {
- key_id: key.id,
- project: full_path_for(container),
- gl_repository: gl_repository_for(container),
- action: 'git-upload-pack',
- secret_token: secret_token,
- protocol: protocol
- }
- )
- end
-
- def push(key, container, protocol = 'ssh', env: nil, changes: nil)
- push_with_path(key,
- full_path: full_path_for(container),
- gl_repository: gl_repository_for(container),
- protocol: protocol,
- env: env,
- changes: changes)
- end
-
- def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil)
- changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
-
- params = {
- changes: changes,
- key_id: key.id,
- project: full_path,
- action: 'git-receive-pack',
- secret_token: secret_token,
- protocol: protocol,
- env: env
- }
- params[:gl_repository] = gl_repository if gl_repository
-
- post(
- api("/internal/allowed"),
- params: params
- )
- end
-
- def archive(key, container)
- post(
- api("/internal/allowed"),
- params: {
- ref: 'master',
- key_id: key.id,
- project: full_path_for(container),
- gl_repository: gl_repository_for(container),
- action: 'git-upload-archive',
- secret_token: secret_token,
- protocol: 'ssh'
- }
- )
- end
-
def lfs_auth_project(project)
post(
api("/internal/lfs_authenticate"),
diff --git a/spec/requests/api/internal/lfs_spec.rb b/spec/requests/api/internal/lfs_spec.rb
new file mode 100644
index 00000000000..4739ec62992
--- /dev/null
+++ b/spec/requests/api/internal/lfs_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::Lfs do
+ include APIInternalBaseHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:lfs_object) { create(:lfs_object, :with_file) }
+ let_it_be(:lfs_objects_project) { create(:lfs_objects_project, project: project, lfs_object: lfs_object) }
+ let_it_be(:gl_repository) { "project-#{project.id}" }
+ let_it_be(:filename) { lfs_object.file.path }
+
+ let(:secret_token) { Gitlab::Shell.secret_token }
+
+ describe 'GET /internal/lfs' do
+ let(:valid_params) do
+ { oid: lfs_object.oid, gl_repository: gl_repository, secret_token: secret_token }
+ end
+
+ context 'with invalid auth' do
+ let(:invalid_params) { valid_params.merge!(secret_token: 'invalid_token') }
+
+ it 'returns 401' do
+ get api("/internal/lfs"), params: invalid_params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with valid auth' do
+ context 'LFS in local storage' do
+ it 'sends the file' do
+ get api("/internal/lfs"), params: valid_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Type']).to eq('application/octet-stream')
+ expect(response.headers['Content-Length'].to_i).to eq(File.stat(filename).size)
+ expect(response.body).to eq(File.open(filename, 'rb', &:read))
+ end
+
+ # https://www.rubydoc.info/github/rack/rack/master/Rack/Sendfile
+ it 'delegates sending to Web server' do
+ get api("/internal/lfs"), params: valid_params, env: { 'HTTP_X_SENDFILE_TYPE' => 'X-Sendfile' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Type']).to eq('application/octet-stream')
+ expect(response.headers['Content-Length'].to_i).to eq(0)
+ expect(response.headers['X-Sendfile']).to be_present
+ expect(response.body).to eq("")
+ end
+
+ it 'returns 404 for an unknown file' do
+ params = valid_params.merge(oid: SecureRandom.hex)
+
+ get api("/internal/lfs"), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 404 if LFS object does not belong to project' do
+ other_lfs = create(:lfs_object, :with_file)
+ params = valid_params.merge(oid: other_lfs.oid)
+
+ get api("/internal/lfs"), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'LFS in object storage' do
+ let!(:lfs_object2) { create(:lfs_object, :with_file) }
+ let!(:lfs_objects_project2) { create(:lfs_objects_project, project: project, lfs_object: lfs_object2) }
+ let(:valid_params) do
+ { oid: lfs_object2.oid, gl_repository: gl_repository, secret_token: secret_token }
+ end
+
+ before do
+ stub_lfs_object_storage(enabled: true)
+ lfs_object2.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'notifies Workhorse to send the file' do
+ get api("/internal/lfs"), params: valid_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
+ expect(response.headers['Content-Type']).to eq('application/octet-stream')
+ expect(response.headers['Content-Length'].to_i).to eq(0)
+ expect(response.body).to eq("")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index 4c60c8bd2a3..6b5a4b6436a 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -17,23 +17,52 @@ RSpec.describe API::Lint do
expect(json_response['status']).to eq('valid')
expect(json_response['errors']).to eq([])
end
+
+ it 'outputs expanded yaml content' do
+ post api('/ci/lint'), params: { content: yaml_content, include_merged_yaml: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('merged_yaml')
+ end
end
context 'with an invalid .gitlab_ci.yml' do
- it 'responds with errors about invalid syntax' do
- post api('/ci/lint'), params: { content: 'invalid content' }
+ context 'with invalid syntax' do
+ let(:yaml_content) { 'invalid content' }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['status']).to eq('invalid')
- expect(json_response['errors']).to eq(['Invalid configuration format'])
+ it 'responds with errors about invalid syntax' do
+ post api('/ci/lint'), params: { content: yaml_content }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['status']).to eq('invalid')
+ expect(json_response['errors']).to eq(['Invalid configuration format'])
+ end
+
+ it 'outputs expanded yaml content' do
+ post api('/ci/lint'), params: { content: yaml_content, include_merged_yaml: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('merged_yaml')
+ end
end
- it "responds with errors about invalid configuration" do
- post api('/ci/lint'), params: { content: '{ image: "ruby:2.7", services: ["postgres"] }' }
+ context 'with invalid configuration' do
+ let(:yaml_content) { '{ image: "ruby:2.7", services: ["postgres"] }' }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['status']).to eq('invalid')
- expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
+ it 'responds with errors about invalid configuration' do
+ post api('/ci/lint'), params: { content: yaml_content }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['status']).to eq('invalid')
+ expect(json_response['errors']).to eq(['jobs config should contain at least one visible job'])
+ end
+
+ it 'outputs expanded yaml content' do
+ post api('/ci/lint'), params: { content: yaml_content, include_merged_yaml: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to have_key('merged_yaml')
+ end
end
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 0a23aed109b..a67bc157e5a 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe API::MavenPackages do
context 'with jar file' do
let_it_be(:package_file) { jar_file }
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
end
@@ -571,7 +571,7 @@ RSpec.describe API::MavenPackages do
context 'event tracking' do
subject { upload_file_with_token(params) }
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
it 'creates package and stores package file' do
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 2757c56e0fe..0e5fa24ad66 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1140,7 +1140,7 @@ RSpec.describe API::MergeRequests do
context 'when a merge request has more than the changes limit' do
it "returns a string indicating that more changes were made" do
- stub_const('Commit::DIFF_HARD_LIMIT_FILES', 5)
+ allow(Commit).to receive(:diff_hard_limit_files).and_return(5)
merge_request_overflow = create(:merge_request, :simple,
author: user,
diff --git a/spec/requests/api/npm_packages_spec.rb b/spec/requests/api/npm_packages_spec.rb
index 108ea84b7e6..8a3ccd7c6e3 100644
--- a/spec/requests/api/npm_packages_spec.rb
+++ b/spec/requests/api/npm_packages_spec.rb
@@ -88,12 +88,16 @@ RSpec.describe API::NpmPackages do
it_behaves_like 'returning the npm package info'
context 'with unknown package' do
+ subject { get api("/packages/npm/unknown") }
+
it 'returns a redirect' do
- get api("/packages/npm/unknown")
+ subject
expect(response).to have_gitlab_http_status(:found)
expect(response.headers['Location']).to eq('https://registry.npmjs.org/unknown')
end
+
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'npm_request_forward'
end
end
@@ -193,7 +197,7 @@ RSpec.describe API::NpmPackages do
expect(response.media_type).to eq('application/octet-stream')
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
context 'private project' do
@@ -301,7 +305,7 @@ RSpec.describe API::NpmPackages do
context 'with access token' do
subject { upload_package_with_token(package_name, params) }
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
it 'creates npm package with file' do
expect { subject }
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index ff35e380476..7b37862af74 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -171,6 +171,7 @@ RSpec.describe API::ProjectClusters do
name: 'test-cluster',
domain: 'domain.example.com',
managed: false,
+ namespace_per_environment: false,
platform_kubernetes_attributes: platform_kubernetes_attributes,
management_project_id: management_project_id
}
@@ -202,6 +203,7 @@ RSpec.describe API::ProjectClusters do
expect(cluster_result.domain).to eq('domain.example.com')
expect(cluster_result.managed).to be_falsy
expect(cluster_result.management_project_id).to eq management_project_id
+ expect(cluster_result.namespace_per_environment).to eq(false)
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.namespace).to eq(namespace)
@@ -235,6 +237,22 @@ RSpec.describe API::ProjectClusters do
end
end
+ context 'when namespace_per_environment is not set' do
+ let(:cluster_params) do
+ {
+ name: 'test-cluster',
+ domain: 'domain.example.com',
+ platform_kubernetes_attributes: platform_kubernetes_attributes
+ }
+ end
+
+ it 'defaults to true' do
+ cluster_result = Clusters::Cluster.find(json_response['id'])
+
+ expect(cluster_result).to be_namespace_per_environment
+ end
+ end
+
context 'current user does not have access to management_project_id' do
let(:management_project_id) { create(:project).id }
diff --git a/spec/requests/api/project_container_repositories_spec.rb b/spec/requests/api/project_container_repositories_spec.rb
index 6cf0619cde4..a0c310a448b 100644
--- a/spec/requests/api/project_container_repositories_spec.rb
+++ b/spec/requests/api/project_container_repositories_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :guest, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_repositories'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_repositories'
it_behaves_like 'returns repositories for allowed users', :reporter, 'project' do
let(:object) { project }
@@ -57,7 +57,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :developer, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a gitlab tracking event', described_class.name, 'delete_repository'
+ it_behaves_like 'a package tracking event', described_class.name, 'delete_repository'
context 'for maintainer' do
let(:api_user) { maintainer }
@@ -86,7 +86,7 @@ RSpec.describe API::ProjectContainerRepositories do
stub_container_registry_tags(repository: root_repository.path, tags: %w(rootA latest))
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_tags'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_tags'
it 'returns a list of tags' do
subject
@@ -114,7 +114,7 @@ RSpec.describe API::ProjectContainerRepositories do
it_behaves_like 'rejected container repository access', :developer, :forbidden
it_behaves_like 'rejected container repository access', :anonymous, :not_found
- it_behaves_like 'a gitlab tracking event', described_class.name, 'delete_tag_bulk'
+ it_behaves_like 'a package tracking event', described_class.name, 'delete_tag_bulk'
end
context 'for maintainer' do
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index 2f0d0fc87ec..4c8599d1a20 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -23,6 +23,19 @@ RSpec.describe API::ProjectPackages do
it_behaves_like 'returns packages', :project, :no_type
end
+ context 'with conan package' do
+ let!(:conan_package) { create(:conan_package, project: project) }
+
+ it 'uses the conan recipe as the package name' do
+ subject
+
+ response_conan_package = json_response.find { |package| package['id'] == conan_package.id }
+
+ expect(response_conan_package['name']).to eq(conan_package.conan_recipe)
+ expect(response_conan_package['conan_package_name']).to eq(conan_package.name)
+ end
+ end
+
context 'project is private' do
let(:project) { create(:project, :private) }
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 779ae983886..01ead9eef54 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -259,7 +259,7 @@ RSpec.describe API::Releases do
end
it '#collected_at' do
- Timecop.freeze(Time.now.round) do
+ travel_to(Time.now.round) do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['evidences'].first['collected_at'].to_datetime.to_i).to be_within(1.minute).of(release.evidences.first.created_at.to_i)
@@ -476,7 +476,7 @@ RSpec.describe API::Releases do
it 'sets the released_at to the current time if the released_at parameter is not provided' do
now = Time.zone.parse('2015-08-25 06:00:00Z')
- Timecop.freeze(now) do
+ travel_to(now) do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(project.releases.last.released_at).to eq(now)
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 36707f32d04..45bce8c8a5c 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -402,7 +402,9 @@ RSpec.describe API::Repositories do
end
it "returns an empty string when the diff overflows" do
- stub_const('Gitlab::Git::DiffCollection::DEFAULT_LIMITS', { max_files: 2, max_lines: 2 })
+ allow(Gitlab::Git::DiffCollection)
+ .to receive(:default_limits)
+ .and_return({ max_files: 2, max_lines: 2 })
get api(route, current_user), params: { from: 'master', to: 'feature' }
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index af6731f3015..523f0f72f11 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -231,18 +231,6 @@ RSpec.describe API::Search do
it_behaves_like 'pagination', scope: :users
it_behaves_like 'ping counters', scope: :users
-
- context 'when users search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
-
- get api(endpoint, user), params: { scope: 'users', search: 'billy' }
- end
-
- it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
end
context 'for snippet_titles scope' do
@@ -416,18 +404,6 @@ RSpec.describe API::Search do
include_examples 'pagination', scope: :users
end
-
- context 'when users search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
-
- get api(endpoint, user), params: { scope: 'users', search: 'billy' }
- end
-
- it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
end
context 'for users scope with group path as id' do
@@ -589,18 +565,6 @@ RSpec.describe API::Search do
include_examples 'pagination', scope: :users
end
-
- context 'when users search feature is disabled' do
- before do
- stub_feature_flags(users_search: false)
-
- get api(endpoint, user), params: { scope: 'users', search: 'billy' }
- end
-
- it 'returns 400 error' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
end
context 'for notes scope' do
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index 5528a0c094f..63ed57c5045 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -264,4 +264,34 @@ RSpec.describe API::Services do
expect(json_response['properties']['notify_only_broken_pipelines']).to eq(true)
end
end
+
+ describe 'Hangouts Chat service' do
+ let(:service_name) { 'hangouts-chat' }
+ let(:params) do
+ {
+ webhook: 'https://hook.example.com',
+ branches_to_be_notified: 'default'
+ }
+ end
+
+ before do
+ project.create_hangouts_chat_service(
+ active: true,
+ properties: params
+ )
+ end
+
+ it 'accepts branches_to_be_notified for update', :aggregate_failures do
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: params.merge(branches_to_be_notified: 'all')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['properties']['branches_to_be_notified']).to eq('all')
+ end
+
+ it 'only requires the webhook param' do
+ put api("/projects/#{project.id}/services/#{service_name}", user), params: { webhook: 'https://hook.example.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index ef12f6dbed3..b0face6ec41 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -413,6 +413,14 @@ RSpec.describe API::Settings, 'Settings' do
end
end
+ it 'supports legacy admin_notification_email' do
+ put api('/application/settings', admin),
+ params: { admin_notification_email: 'test@example.com' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['abuse_notification_email']).to eq('test@example.com')
+ end
+
context "missing sourcegraph_url value when sourcegraph_enabled is true" do
it "returns a blank parameter error message" do
put api("/application/settings", admin), params: { sourcegraph_enabled: true }
diff --git a/spec/requests/api/terraform/state_spec.rb b/spec/requests/api/terraform/state_spec.rb
index 8d128bd911f..aff41ff5974 100644
--- a/spec/requests/api/terraform/state_spec.rb
+++ b/spec/requests/api/terraform/state_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe API::Terraform::State do
let(:state_path) { "/projects/#{project_id}/terraform/state/#{state_name}" }
before do
- stub_terraform_state_object_storage(Terraform::StateUploader)
+ stub_terraform_state_object_storage
end
describe 'GET /projects/:id/terraform/state/:name' do
diff --git a/spec/requests/api/terraform/state_version_spec.rb b/spec/requests/api/terraform/state_version_spec.rb
new file mode 100644
index 00000000000..ade0aacf805
--- /dev/null
+++ b/spec/requests/api/terraform/state_version_spec.rb
@@ -0,0 +1,210 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Terraform::StateVersion do
+ include HttpBasicAuthHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user, developer_projects: [project]) }
+ let_it_be(:maintainer) { create(:user, maintainer_projects: [project]) }
+ let_it_be(:user_without_access) { create(:user) }
+
+ let_it_be(:state) { create(:terraform_state, project: project) }
+
+ let!(:versions) { create_list(:terraform_state_version, 3, terraform_state: state) }
+
+ let(:current_user) { maintainer }
+ let(:auth_header) { user_basic_auth_header(current_user) }
+ let(:project_id) { project.id }
+ let(:state_name) { state.name }
+ let(:version) { versions.last }
+ let(:version_serial) { version.version }
+ let(:state_version_path) { "/projects/#{project_id}/terraform/state/#{state_name}/versions/#{version_serial}" }
+
+ describe 'GET /projects/:id/terraform/state/:name/versions/:serial' do
+ subject(:request) { get api(state_version_path), headers: auth_header }
+
+ context 'with invalid authentication' do
+ let(:auth_header) { basic_auth_header('bad', 'token') }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with no authentication' do
+ let(:auth_header) { nil }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'personal access token authentication' do
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+
+ context 'for a project that does not exist' do
+ let(:project_id) { '0000' }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+ end
+
+ context 'with no permissions' do
+ let(:current_user) { user_without_access }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'job token authentication' do
+ let(:auth_header) { job_basic_auth_header(job) }
+
+ context 'with maintainer permissions' do
+ let(:job) { create(:ci_build, status: :running, project: project, user: maintainer) }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+
+ it 'returns unauthorized status if the job is not running' do
+ job.update!(status: :failed)
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+
+ context 'for a project that does not exist' do
+ let(:project_id) { '0000' }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:job) { create(:ci_build, status: :running, project: project, user: developer) }
+
+ it 'returns the state contents at the given version' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(version.file.read)
+ end
+ end
+
+ context 'with no permissions' do
+ let(:current_user) { user_without_access }
+ let(:job) { create(:ci_build, status: :running, user: current_user) }
+
+ it 'returns not found status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+
+ describe 'DELETE /projects/:id/terraform/state/:name/versions/:serial' do
+ subject(:request) { delete api(state_version_path), headers: auth_header }
+
+ context 'with invalid authentication' do
+ let(:auth_header) { basic_auth_header('bad', 'token') }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with no authentication' do
+ let(:auth_header) { nil }
+
+ it 'returns unauthorized status' do
+ request
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'with maintainer permissions' do
+ let(:current_user) { maintainer }
+
+ it 'deletes the version' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(-1)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ context 'when the version does not exist' do
+ let(:version_serial) { -1 }
+
+ it 'does not delete a version' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with developer permissions' do
+ let(:current_user) { developer }
+
+ it 'returns forbidden status' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'with no permissions' do
+ let(:current_user) { user_without_access }
+
+ it 'returns not found status' do
+ expect { request }.to change { Terraform::StateVersion.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/unleash_spec.rb b/spec/requests/api/unleash_spec.rb
new file mode 100644
index 00000000000..0b70d62b093
--- /dev/null
+++ b/spec/requests/api/unleash_spec.rb
@@ -0,0 +1,608 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Unleash do
+ include FeatureFlagHelpers
+
+ let_it_be(:project, refind: true) { create(:project) }
+ let(:project_id) { project.id }
+ let(:params) { }
+ let(:headers) { }
+
+ shared_examples 'authenticated request' do
+ context 'when using instance id' do
+ let(:client) { create(:operations_feature_flags_client, project: project) }
+ let(:params) { { instance_id: client.token } }
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when repository is disabled' do
+ before do
+ project.project_feature.update!(
+ repository_access_level: ::ProjectFeature::DISABLED,
+ merge_requests_access_level: ::ProjectFeature::DISABLED,
+ builds_access_level: ::ProjectFeature::DISABLED
+ )
+ end
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when repository is private' do
+ before do
+ project.project_feature.update!(
+ repository_access_level: ::ProjectFeature::PRIVATE,
+ merge_requests_access_level: ::ProjectFeature::DISABLED,
+ builds_access_level: ::ProjectFeature::DISABLED
+ )
+ end
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'when using header' do
+ let(:client) { create(:operations_feature_flags_client, project: project) }
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token }}
+
+ it 'responds with OK' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when using bogus instance id' do
+ let(:params) { { instance_id: 'token' } }
+
+ it 'responds with unauthorized' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when the project does not exist' do
+ let(:project_id) { -5000 }
+ let(:params) { { instance_id: 'token' } }
+
+ it 'responds with unauthorized' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ shared_examples_for 'support multiple environments' do
+ let!(:client) { create(:operations_feature_flags_client, project: project) }
+ let!(:base_headers) { { "UNLEASH-INSTANCEID" => client.token } }
+ let!(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "test" }) }
+
+ let!(:feature_flag_1) do
+ create(:operations_feature_flag, name: "feature_flag_1", project: project, active: true)
+ end
+
+ let!(:feature_flag_2) do
+ create(:operations_feature_flag, name: "feature_flag_2", project: project, active: false)
+ end
+
+ before do
+ create_scope(feature_flag_1, 'production', false)
+ create_scope(feature_flag_2, 'review/*', true)
+ end
+
+ it 'does not have N+1 problem' do
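+ # Capture the query count with the two existing flags, add a third flag, and assert no additional queries are issued.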
+ control_count = ActiveRecord::QueryRecorder.new { get api(features_url), headers: headers }.count
+
+ create(:operations_feature_flag, name: "feature_flag_3", project: project, active: true)
+
+ expect { get api(features_url), headers: headers }.not_to exceed_query_limit(control_count)
+ end
+
+ context 'when app name is staging' do
+ let(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "staging" }) }
+
+ it 'returns correct active values' do
+ subject
+
+ feature_flag_1 = json_response['features'].find { |f| f['name'] == 'feature_flag_1' }
+ feature_flag_2 = json_response['features'].find { |f| f['name'] == 'feature_flag_2' }
+
+ expect(feature_flag_1['enabled']).to eq(true)
+ expect(feature_flag_2['enabled']).to eq(false)
+ end
+ end
+
+ context 'when app name is production' do
+ let(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "production" }) }
+
+ it 'returns correct active values' do
+ subject
+
+ feature_flag_1 = json_response['features'].find { |f| f['name'] == 'feature_flag_1' }
+ feature_flag_2 = json_response['features'].find { |f| f['name'] == 'feature_flag_2' }
+
+ expect(feature_flag_1['enabled']).to eq(false)
+ expect(feature_flag_2['enabled']).to eq(false)
+ end
+ end
+
+ context 'when app name is review/patch-1' do
+ let(:headers) { base_headers.merge({ "UNLEASH-APPNAME" => "review/patch-1" }) }
+
+ it 'returns correct active values' do
+ subject
+
+ feature_flag_1 = json_response['features'].find { |f| f['name'] == 'feature_flag_1' }
+ feature_flag_2 = json_response['features'].find { |f| f['name'] == 'feature_flag_2' }
+
+ expect(feature_flag_1['enabled']).to eq(true)
+ expect(feature_flag_2['enabled']).to eq(false)
+ end
+ end
+
+ context 'when app name is empty' do
+ let(:headers) { base_headers }
+
+ it 'returns empty list' do
+ subject
+
+ expect(json_response['features'].count).to eq(0)
+ end
+ end
+ end
+
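+ # The legacy /features endpoint and the /client/features endpoint share behaviour, so both run the same examples.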
+ %w(/feature_flags/unleash/:project_id/features /feature_flags/unleash/:project_id/client/features).each do |features_endpoint|
+ describe "GET #{features_endpoint}" do
+ let(:features_url) { features_endpoint.sub(':project_id', project_id.to_s) }
+ let(:client) { create(:operations_feature_flags_client, project: project) }
+
+ subject { get api(features_url), params: params, headers: headers }
+
+ it_behaves_like 'authenticated request'
+
+ context 'with version 1 (legacy) feature flags' do
+ let(:feature_flag) { create(:operations_feature_flag, project: project, name: 'feature1', active: true, version: 1) }
+
+ it_behaves_like 'support multiple environments'
+
+ context 'with a list of feature flags' do
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "production" } }
+ let!(:enabled_feature_flag) { create(:operations_feature_flag, project: project, name: 'feature1', active: true, version: 1) }
+ let!(:disabled_feature_flag) { create(:operations_feature_flag, project: project, name: 'feature2', active: false, version: 1) }
+
+ it 'responds with a list of features' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['version']).to eq(1)
+ expect(json_response['features']).not_to be_empty
+ expect(json_response['features'].map { |f| f['name'] }.sort).to eq(%w[feature1 feature2])
+ expect(json_response['features'].sort_by { |f| f['name'] }.map { |f| f['enabled'] }).to eq([true, false])
+ end
+
+ it 'matches json schema' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('unleash/unleash')
+ end
+ end
+
+ it 'returns a feature flag strategy' do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'sandbox',
+ active: true,
+ strategies: [{ name: "gradualRolloutUserId",
+ parameters: { groupId: "default", percentage: "50" } }])
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "sandbox" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(true)
+ strategies = json_response['features'].first['strategies']
+ expect(strategies).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => {
+ "percentage" => "50",
+ "groupId" => "default"
+ }
+ }])
+ end
+
+ it 'returns a default strategy for a scope' do
+ create(:operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: 'sandbox', active: true)
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "sandbox" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(true)
+ strategies = json_response['features'].first['strategies']
+ expect(strategies).to eq([{ "name" => "default", "parameters" => {} }])
+ end
+
+ it 'returns multiple strategies for a feature flag' do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'staging',
+ active: true,
+ strategies: [{ name: "userWithId", parameters: { userIds: "max,fred" } },
+ { name: "gradualRolloutUserId",
+ parameters: { groupId: "default", percentage: "50" } }])
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "staging" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(true)
+ strategies = json_response['features'].first['strategies'].sort_by { |s| s['name'] }
+ expect(strategies).to eq([{
+ "name" => "gradualRolloutUserId",
+ "parameters" => {
+ "percentage" => "50",
+ "groupId" => "default"
+ }
+ }, {
+ "name" => "userWithId",
+ "parameters" => {
+ "userIds" => "max,fred"
+ }
+ }])
+ end
+
+ it 'returns a disabled feature when the flag is disabled' do
+ flag = create(:operations_feature_flag, project: project, name: 'test_feature', active: false, version: 1)
+ create(:operations_feature_flag_scope, feature_flag: flag, environment_scope: 'production', active: true)
+ headers = { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "production" }
+
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['enabled']).to eq(false)
+ end
+
+ context "with an inactive scope" do
+ let!(:scope) { create(:operations_feature_flag_scope, feature_flag: feature_flag, environment_scope: 'production', active: false, strategies: [{ name: "default", parameters: {} }]) }
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token, "UNLEASH-APPNAME" => "production" } }
+
+ it 'returns a disabled feature' do
+ get api(features_url), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ feature_json = json_response['features'].first
+ expect(feature_json['enabled']).to eq(false)
+ expect(feature_json['strategies']).to eq([{ 'name' => 'default', 'parameters' => {} }])
+ end
+ end
+ end
+
+ context 'with version 2 feature flags' do
+ it 'does not return a flag without any strategies' do
+ create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to be_empty
+ end
+
+ it 'returns a flag with a default strategy' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns a flag with a userWithId strategy' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user123,user456' })
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user123,user456' }
+ }]
+ }])
+ end
+
+ it 'returns a flag with multiple strategies' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user_a,user_b' })
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '45' })
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'production')
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].map { |f| f['name'] }.sort).to eq(['feature1'])
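+ # Sort each flag's strategies by name so the comparison below does not depend on ordering.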
+ features_json = json_response['features'].map do |feature|
+ feature.merge(feature.slice('strategies').transform_values { |v| v.sort_by { |s| s['name'] } })
+ end
+ expect(features_json).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'gradualRolloutUserId',
+ 'parameters' => { 'groupId' => 'default', 'percentage' => '45' }
+ }, {
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user_a,user_b' }
+ }]
+ }])
+ end
+
+ it 'returns only flags matching the environment scope' do
+ feature_flag_a = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag_a)
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'production')
+ feature_flag_b = create(:operations_feature_flag, project: project,
+ name: 'feature2', active: true, version: 2)
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag_b)
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'staging' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].map { |f| f['name'] }.sort).to eq(['feature2'])
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature2',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns only strategies matching the environment scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user2,user8,user4' })
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'production')
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user2,user8,user4' }
+ }]
+ }])
+ end
+
+ it 'returns only flags for the given project' do
+ project_b = create(:project)
+ feature_flag_a = create(:operations_feature_flag, project: project, name: 'feature_a', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag_a)
+ create(:operations_scope, strategy: strategy_a, environment_scope: 'sandbox')
+ feature_flag_b = create(:operations_feature_flag, project: project_b, name: 'feature_b', active: true, version: 2)
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag_b)
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'sandbox')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'sandbox' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature_a',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns all strategies with a matching scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy_a = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'userWithId', parameters: { userIds: 'user2,user8,user4' })
+ create(:operations_scope, strategy: strategy_a, environment_scope: '*')
+ strategy_b = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy_b, environment_scope: 'review/*')
+ strategy_c = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'gradualRolloutUserId', parameters: { groupId: 'default', percentage: '15' })
+ create(:operations_scope, strategy: strategy_c, environment_scope: 'review/patch-1')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'review/patch-1' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].first['strategies'].sort_by { |s| s['name'] }).to eq([{
+ 'name' => 'default',
+ 'parameters' => {}
+ }, {
+ 'name' => 'gradualRolloutUserId',
+ 'parameters' => { 'groupId' => 'default', 'percentage' => '15' }
+ }, {
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user2,user8,user4' }
+ }])
+ end
+
+ it 'returns a strategy with more than one matching scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'feature1', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ create(:operations_scope, strategy: strategy, environment_scope: '*')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature1',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns a disabled flag with a matching scope' do
+ feature_flag = create(:operations_feature_flag, project: project,
+ name: 'myfeature', active: false, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'default', parameters: {})
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'myfeature',
+ 'enabled' => false,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns a userWithId strategy for a gitlabUserList strategy' do
+ feature_flag = create(:operations_feature_flag, :new_version_flag, project: project,
+ name: 'myfeature', active: true)
+ user_list = create(:operations_feature_flag_user_list, project: project,
+ name: 'My List', user_xids: 'user1,user2')
+ strategy = create(:operations_strategy, feature_flag: feature_flag,
+ name: 'gitlabUserList', parameters: {}, user_list: user_list)
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'production' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'myfeature',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user1,user2' }
+ }]
+ }])
+ end
+ end
+
+ context 'when mixing version 1 and version 2 feature flags' do
+ it 'returns both types of flags when both match' do
+ feature_flag_a = create(:operations_feature_flag, project: project,
+ name: 'feature_a', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag_a,
+ name: 'userWithId', parameters: { userIds: 'user8' })
+ create(:operations_scope, strategy: strategy, environment_scope: 'staging')
+ feature_flag_b = create(:operations_feature_flag, project: project,
+ name: 'feature_b', active: true, version: 1)
+ create(:operations_feature_flag_scope, feature_flag: feature_flag_b,
+ active: true, strategies: [{ name: 'default', parameters: {} }], environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'staging' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features'].sort_by { |f| f['name'] }).to eq([{
+ 'name' => 'feature_a',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'userWithId',
+ 'parameters' => { 'userIds' => 'user8' }
+ }]
+ }, {
+ 'name' => 'feature_b',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+
+ it 'returns legacy flags when only legacy flags match' do
+ feature_flag_a = create(:operations_feature_flag, project: project,
+ name: 'feature_a', active: true, version: 2)
+ strategy = create(:operations_strategy, feature_flag: feature_flag_a,
+ name: 'userWithId', parameters: { userIds: 'user8' })
+ create(:operations_scope, strategy: strategy, environment_scope: 'production')
+ feature_flag_b = create(:operations_feature_flag, project: project,
+ name: 'feature_b', active: true, version: 1)
+ create(:operations_feature_flag_scope, feature_flag: feature_flag_b,
+ active: true, strategies: [{ name: 'default', parameters: {} }], environment_scope: 'staging')
+
+ get api(features_url), headers: { 'UNLEASH-INSTANCEID' => client.token, 'UNLEASH-APPNAME' => 'staging' }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['features']).to eq([{
+ 'name' => 'feature_b',
+ 'enabled' => true,
+ 'strategies' => [{
+ 'name' => 'default',
+ 'parameters' => {}
+ }]
+ }])
+ end
+ end
+ end
+ end
+
+ describe 'POST /feature_flags/unleash/:project_id/client/register' do
+ subject { post api("/feature_flags/unleash/#{project_id}/client/register"), params: params, headers: headers }
+
+ it_behaves_like 'authenticated request'
+ end
+
+ describe 'POST /feature_flags/unleash/:project_id/client/metrics' do
+ subject { post api("/feature_flags/unleash/#{project_id}/client/metrics"), params: params, headers: headers }
+
+ it_behaves_like 'authenticated request'
+ end
+end
diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb
index 46dd54dcc73..4f4f386e9db 100644
--- a/spec/requests/api/usage_data_spec.rb
+++ b/spec/requests/api/usage_data_spec.rb
@@ -66,6 +66,10 @@ RSpec.describe API::UsageData do
end
context 'with unknown event' do
+ before do
+ skip_feature_flags_yaml_validation
+ end
+
it 'returns status ok' do
expect(Gitlab::Redis::HLL).not_to receive(:add)
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 806b586ef49..72dd22038c9 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1460,39 +1460,22 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
describe 'GET /user/:id/gpg_keys' do
- context 'when unauthenticated' do
- it 'returns authentication error' do
- get api("/users/#{user.id}/gpg_keys")
+ it 'returns 404 for non-existing user' do
+ get api('/users/0/gpg_keys')
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 User Not Found')
end
- context 'when authenticated' do
- it 'returns 404 for non-existing user' do
- get api('/users/0/gpg_keys', admin)
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 User Not Found')
- end
-
- it 'returns 404 error if key not foud' do
- delete api("/users/#{user.id}/gpg_keys/#{non_existing_record_id}", admin)
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('404 GPG Key Not Found')
- end
-
- it 'returns array of GPG keys' do
- user.gpg_keys << gpg_key
+ it 'returns array of GPG keys' do
+ user.gpg_keys << gpg_key
- get api("/users/#{user.id}/gpg_keys", admin)
+ get api("/users/#{user.id}/gpg_keys")
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.first['key']).to eq(gpg_key.key)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.first['key']).to eq(gpg_key.key)
end
end
@@ -2308,23 +2291,31 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
describe 'POST /users/:id/activate' do
+ subject(:activate) { post api("/users/#{user_id}/activate", api_user) }
+
+ let(:user_id) { user.id }
+
context 'performed by a non-admin user' do
+ let(:api_user) { user }
+
it 'is not authorized to perform the action' do
- post api("/users/#{user.id}/activate", user)
+ activate
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'performed by an admin user' do
+ let(:api_user) { admin }
+
context 'for a deactivated user' do
before do
user.deactivate
-
- post api("/users/#{user.id}/activate", admin)
end
it 'activates a deactivated user' do
+ activate
+
expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
@@ -2333,11 +2324,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for an active user' do
before do
user.activate
-
- post api("/users/#{user.id}/activate", admin)
end
it 'returns 201' do
+ activate
+
expect(response).to have_gitlab_http_status(:created)
expect(user.reload.state).to eq('active')
end
@@ -2346,11 +2337,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a blocked user' do
before do
user.block
-
- post api("/users/#{user.id}/activate", admin)
end
it 'returns 403' do
+ activate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('blocked')
@@ -2360,11 +2351,11 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
context 'for a ldap blocked user' do
before do
user.ldap_block
-
- post api("/users/#{user.id}/activate", admin)
end
it 'returns 403' do
+ activate
+
expect(response).to have_gitlab_http_status(:forbidden)
expect(json_response['message']).to eq('403 Forbidden - A blocked user must be unblocked to be activated')
expect(user.reload.state).to eq('ldap_blocked')
@@ -2372,8 +2363,10 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
end
context 'for a user that does not exist' do
+ let(:user_id) { 0 }
+
before do
- post api("/users/0/activate", admin)
+ activate
end
it_behaves_like '404'
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index dbba2b35d74..a3bfa7ea33c 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe 'Git HTTP requests' do
shared_examples_for 'pulls are allowed' do
it 'allows pulls' do
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -99,7 +99,7 @@ RSpec.describe 'Git HTTP requests' do
shared_examples_for 'pushes are allowed' do
it 'allows pushes', :sidekiq_might_not_need_inline do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -259,7 +259,7 @@ RSpec.describe 'Git HTTP requests' do
it_behaves_like 'pulls are allowed'
it 'rejects pushes with 403 Forbidden' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_wiki_error(:write_to_wiki))
end
@@ -347,7 +347,7 @@ RSpec.describe 'Git HTTP requests' do
end
it 'rejects pushes with 403 Forbidden' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_error(:receive_pack_disabled_over_http))
end
@@ -358,7 +358,7 @@ RSpec.describe 'Git HTTP requests' do
it "rejects pushes with 403 Forbidden" do
allow(Gitlab.config.gitlab_shell).to receive(:upload_pack).and_return(false)
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq(git_access_error(:upload_pack_disabled_over_http))
end
@@ -370,7 +370,7 @@ RSpec.describe 'Git HTTP requests' do
it_behaves_like 'pulls are allowed'
it 'rejects pushes with 403 Forbidden' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
expect(response.body).to eq('You are not allowed to push code to this project.')
end
@@ -485,7 +485,7 @@ RSpec.describe 'Git HTTP requests' do
user.block
project.add_maintainer(user)
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
@@ -507,7 +507,7 @@ RSpec.describe 'Git HTTP requests' do
it "resets the IP in Rack Attack on download" do
expect(Rack::Attack::Allow2Ban).to receive(:reset).twice
- download(path, env) do
+ download(path, **env) do
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -516,7 +516,7 @@ RSpec.describe 'Git HTTP requests' do
it "resets the IP in Rack Attack on upload" do
expect(Rack::Attack::Allow2Ban).to receive(:reset).twice
- upload(path, env) do
+ upload(path, **env) do
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
end
@@ -525,7 +525,7 @@ RSpec.describe 'Git HTTP requests' do
it 'updates the user last activity', :clean_gitlab_redis_shared_state do
expect(user.last_activity_on).to be_nil
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(user.reload.last_activity_on).to eql(Date.today)
end
end
@@ -699,7 +699,7 @@ RSpec.describe 'Git HTTP requests' do
end
it 'uploads get status 404 with "project was moved" message' do
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -917,11 +917,11 @@ RSpec.describe 'Git HTTP requests' do
expect(response).to have_gitlab_http_status(:forbidden)
end
- download(path, env) do |response|
+ download(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
end
- upload(path, env) do |response|
+ upload(path, **env) do |response|
expect(response).to have_gitlab_http_status(:forbidden)
end
end
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 4338bfa3759..3f57b8ba67b 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'value stream analytics events' do
project.add_developer(user)
3.times do |count|
- Timecop.freeze(Time.now + count.days) do
+ travel_to(Time.now + count.days) do
create_cycle
end
end
diff --git a/spec/requests/request_profiler_spec.rb b/spec/requests/request_profiler_spec.rb
index 7f9999bf3d2..72689595480 100644
--- a/spec/requests/request_profiler_spec.rb
+++ b/spec/requests/request_profiler_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Request Profiler' do
time = Time.now
path = "/#{project.full_path}"
- Timecop.freeze(time) do
+ travel_to(time) do
get path, params: {}, headers: { 'X-Profile-Token' => Gitlab::RequestProfiler.profile_token, 'X-Profile-Mode' => profile_type }
end
diff --git a/spec/routing/admin_routing_spec.rb b/spec/routing/admin_routing_spec.rb
index fedafff0d1b..9374df0c4a2 100644
--- a/spec/routing/admin_routing_spec.rb
+++ b/spec/routing/admin_routing_spec.rb
@@ -184,3 +184,9 @@ RSpec.describe Admin::PlanLimitsController, "routing" do
expect(post("/admin/plan_limits")).to route_to('admin/plan_limits#create')
end
end
+
+RSpec.describe Admin::RunnersController, "routing" do
+ it "to #runner_setup_scripts" do
+ expect(get("/admin/runners/runner_setup_scripts")).to route_to('admin/runners#runner_setup_scripts')
+ end
+end
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index f4d5f899519..9de99b73d23 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -43,6 +43,10 @@ RSpec.shared_examples 'groups routing' do
expect(get("/groups/#{group_path}/-/milestones")).to route_to('groups/milestones#index', group_id: group_path)
end
+ it "to #runner_setup_scripts" do
+ expect(get("/groups/#{group_path}/-/settings/ci_cd/runner_setup_scripts")).to route_to('groups/settings/ci_cd#runner_setup_scripts', group_id: group_path)
+ end
+
it 'routes to the avatars controller' do
expect(delete("/groups/#{group_path}/-/avatar"))
.to route_to(group_id: group_path,
diff --git a/spec/routing/instance_statistics_routing_spec.rb b/spec/routing/instance_statistics_routing_spec.rb
deleted file mode 100644
index 7eec807fb0b..00000000000
--- a/spec/routing/instance_statistics_routing_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Instance Statistics', 'routing' do
- include RSpec::Rails::RequestExampleGroup
-
- it "routes '/-/instance_statistics' to dev ops report" do
- expect(get('/-/instance_statistics')).to redirect_to('/admin/dev_ops_report')
- end
-end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index b80baf0aa13..b04bae3e224 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -731,6 +731,12 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::Settings::CiCdController, 'routing' do
+ it "to #runner_setup_scripts" do
+ expect(get("/gitlab/gitlabhq/-/settings/ci_cd/runner_setup_scripts")).to route_to('projects/settings/ci_cd#runner_setup_scripts', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+ end
+
describe Projects::TemplatesController, 'routing' do
describe '#show' do
def show_with_template_type(template_type)
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 722e687838f..85a4c0e4bca 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -374,3 +374,9 @@ RSpec.describe Snippets::BlobsController, "routing" do
.to route_to('snippets/blobs#raw', snippet_id: '1', ref: 'master', path: 'lib/version.rb')
end
end
+
+RSpec.describe RunnerSetupController, 'routing' do
+ it 'to #platforms' do
+ expect(get("/-/runner_setup/platforms")).to route_to('runner_setup#platforms')
+ end
+end
diff --git a/spec/rubocop/cop/code_reuse/active_record_spec.rb b/spec/rubocop/cop/code_reuse/active_record_spec.rb
index 25eca185f26..e15b9e11aed 100644
--- a/spec/rubocop/cop/code_reuse/active_record_spec.rb
+++ b/spec/rubocop/cop/code_reuse/active_record_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe RuboCop::Cop::CodeReuse::ActiveRecord, type: :rubocop do
SOURCE
end
- it 'autocorrects offenses in instance methods by whitelisting them' do
+ it 'autocorrects offenses in instance methods by allowing them' do
corrected = autocorrect_source(<<~SOURCE)
def foo
User.where
@@ -100,7 +100,7 @@ RSpec.describe RuboCop::Cop::CodeReuse::ActiveRecord, type: :rubocop do
SOURCE
end
- it 'autocorrects offenses in class methods by whitelisting them' do
+ it 'autocorrects offenses in class methods by allowing them' do
corrected = autocorrect_source(<<~SOURCE)
def self.foo
User.where
@@ -116,7 +116,7 @@ RSpec.describe RuboCop::Cop::CodeReuse::ActiveRecord, type: :rubocop do
SOURCE
end
- it 'autocorrects offenses in blocks by whitelisting them' do
+ it 'autocorrects offenses in blocks by allowing them' do
corrected = autocorrect_source(<<~SOURCE)
get '/' do
User.where
diff --git a/spec/serializers/ci/trigger_entity_spec.rb b/spec/serializers/ci/trigger_entity_spec.rb
new file mode 100644
index 00000000000..b2f3337d166
--- /dev/null
+++ b/spec/serializers/ci/trigger_entity_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TriggerEntity do
+ let(:project) { create(:project) }
+ let(:trigger) { create(:ci_trigger, project: project, token: '237f3604900a4cd71ed06ef13e57b96d') }
+ let(:user) { create(:user) }
+ let(:entity) { described_class.new(trigger, current_user: user, project: project) }
+
+ describe '#as_json' do
+ let(:as_json) { entity.as_json }
+ let(:project_trigger_path) { "/#{project.full_path}/-/triggers/#{trigger.id}" }
+
+ it 'contains required fields' do
+ expect(as_json).to include(
+ :description, :owner, :last_used, :token, :has_token_exposed, :can_access_project
+ )
+ end
+
+ it 'contains user fields' do
+ expect(as_json[:owner].to_json).to match_schema('entities/user')
+ end
+
+ context 'when current user can manage triggers' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns short_token as token' do
+ expect(as_json[:token]).to eq(trigger.short_token)
+ end
+
+ it 'contains project_trigger_path' do
+ expect(as_json[:project_trigger_path]).to eq(project_trigger_path)
+ end
+
+ it 'does not contain edit_project_trigger_path' do
+ expect(as_json).not_to include(:edit_project_trigger_path)
+ end
+
+ it 'returns has_token_exposed' do
+ expect(as_json[:has_token_exposed]).to eq(false)
+ end
+ end
+
+ context 'when current user is the owner of the trigger' do
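+ # The trigger owner sees the full token and an edit path, unlike other maintainers above.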
+ before do
+ project.add_maintainer(user)
+ trigger.update!(owner: user)
+ end
+
+ it 'returns token as token' do
+ expect(as_json[:token]).to eq(trigger.token)
+ end
+
+ it 'contains project_trigger_path' do
+ expect(as_json[:project_trigger_path]).to eq(project_trigger_path)
+ end
+
+ it 'contains edit_project_trigger_path' do
+ expect(as_json[:edit_project_trigger_path]).to eq("#{project_trigger_path}/edit")
+ end
+
+ it 'returns has_token_exposed' do
+ expect(as_json[:has_token_exposed]).to eq(true)
+ end
+ end
+ end
+end
diff --git a/spec/serializers/ci/trigger_serializer_spec.rb b/spec/serializers/ci/trigger_serializer_spec.rb
new file mode 100644
index 00000000000..a669a8c3ed0
--- /dev/null
+++ b/spec/serializers/ci/trigger_serializer_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TriggerSerializer do
+ describe '#represent' do
+ let(:represent) { described_class.new.represent(trigger) }
+
+ let(:trigger) { build_stubbed(:ci_trigger) }
+
+ it 'matches schema' do
+ expect(represent.to_json).to match_schema('entities/trigger')
+ end
+ end
+end
diff --git a/spec/serializers/cluster_serializer_spec.rb b/spec/serializers/cluster_serializer_spec.rb
index 04999975276..e65e97b6ae0 100644
--- a/spec/serializers/cluster_serializer_spec.rb
+++ b/spec/serializers/cluster_serializer_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe ClusterSerializer do
:cluster_type,
:enabled,
:environment_scope,
+ :id,
:gitlab_managed_apps_logs_path,
:enable_advanced_logs_querying,
:kubernetes_errors,
diff --git a/spec/serializers/diff_file_base_entity_spec.rb b/spec/serializers/diff_file_base_entity_spec.rb
index 94c39e11790..99dbaff4b7e 100644
--- a/spec/serializers/diff_file_base_entity_spec.rb
+++ b/spec/serializers/diff_file_base_entity_spec.rb
@@ -3,10 +3,24 @@
require 'spec_helper'
RSpec.describe DiffFileBaseEntity do
- let(:project) { create(:project, :repository) }
+ include ProjectForksHelper
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
let(:repository) { project.repository }
let(:entity) { described_class.new(diff_file, options).as_json }
+ shared_examples 'nil if removed source branch' do |key|
+ before do
+ allow(merge_request).to receive(:source_branch_exists?).and_return(false)
+ end
+
+ specify do
+ expect(entity[key]).to eq(nil)
+ end
+ end
+
context 'submodule information for a' do
let(:commit_sha) { "" }
let(:commit) { project.commit(commit_sha) }
@@ -67,7 +81,7 @@ RSpec.describe DiffFileBaseEntity do
context 'edit_path' do
let(:diff_file) { merge_request.diffs.diff_files.to_a.last }
- let(:options) { { request: EntityRequest.new(current_user: create(:user)), merge_request: merge_request } }
+ let(:options) { { request: EntityRequest.new(current_user: user), merge_request: merge_request } }
let(:params) { {} }
shared_examples 'a diff file edit path to the source branch' do
@@ -81,16 +95,7 @@ RSpec.describe DiffFileBaseEntity do
let(:params) { { from_merge_request_iid: merge_request.iid } }
it_behaves_like 'a diff file edit path to the source branch'
-
- context 'removed source branch' do
- before do
- allow(merge_request).to receive(:source_branch_exists?).and_return(false)
- end
-
- it do
- expect(entity[:edit_path]).to eq(nil)
- end
- end
+ it_behaves_like 'nil if removed source branch', :edit_path
end
context 'closed' do
@@ -118,4 +123,30 @@ RSpec.describe DiffFileBaseEntity do
end
end
end
+
+ context 'ide_edit_path' do
+ let(:source_project) { project }
+ let(:merge_request) { create(:merge_request, iid: 123, target_project: target_project, source_project: source_project) }
+ let(:diff_file) { merge_request.diffs.diff_files.to_a.last }
+ let(:options) { { request: EntityRequest.new(current_user: user), merge_request: merge_request } }
+ let(:expected_merge_request_path) { "/-/ide/project/#{source_project.full_path}/merge_requests/#{merge_request.iid}" }
+
+ context 'when source_project and target_project are the same' do
+ let(:target_project) { source_project }
+
+ it_behaves_like 'nil if removed source branch', :ide_edit_path
+
+ it 'returns the merge_request ide route' do
+ expect(entity[:ide_edit_path]).to eq expected_merge_request_path
+ end
+ end
+
+ context 'when source_project and target_project are different' do
+ let(:target_project) { fork_project(source_project, source_project.owner, repository: true) }
+
+ it 'returns the merge_request ide route with the target_project as param' do
+ expect(entity[:ide_edit_path]).to eq("#{expected_merge_request_path}?target_project=#{ERB::Util.url_encode(target_project.full_path)}")
+ end
+ end
+ end
end
diff --git a/spec/serializers/diffs_entity_spec.rb b/spec/serializers/diffs_entity_spec.rb
index 7c59e4aed83..5928a1c24b3 100644
--- a/spec/serializers/diffs_entity_spec.rb
+++ b/spec/serializers/diffs_entity_spec.rb
@@ -68,15 +68,5 @@ RSpec.describe DiffsEntity do
end
end
end
-
- context 'when code_navigation feature flag is disabled' do
- it 'does not include code navigation properties' do
- stub_feature_flags(code_navigation: false)
-
- expect(Gitlab::CodeNavigationPath).not_to receive(:new)
-
- expect(subject).not_to include(:definition_path_prefix)
- end
- end
end
end
diff --git a/spec/serializers/feature_flag_entity_spec.rb b/spec/serializers/feature_flag_entity_spec.rb
new file mode 100644
index 00000000000..21ecfe59c31
--- /dev/null
+++ b/spec/serializers/feature_flag_entity_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagEntity do
+ let(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:project) { create(:project) }
+ let(:request) { double('request', current_user: user) }
+ let(:user) { create(:user) }
+ let(:entity) { described_class.new(feature_flag, request: request) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ subject { entity.as_json }
+
+ it 'has feature flag attributes' do
+ expect(subject).to include(:id, :active, :created_at, :updated_at,
+ :description, :name, :edit_path, :destroy_path)
+ end
+end
diff --git a/spec/serializers/feature_flag_serializer_spec.rb b/spec/serializers/feature_flag_serializer_spec.rb
new file mode 100644
index 00000000000..fab8ca93b1b
--- /dev/null
+++ b/spec/serializers/feature_flag_serializer_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagSerializer do
+ let(:serializer) { described_class.new(project: project, current_user: user) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:feature_flags) { create_list(:operations_feature_flag, 3) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#represent' do
+ subject { serializer.represent(feature_flags) }
+
+ it 'includes feature flag attributes' do
+ is_expected.to all(include(:id, :active, :created_at, :updated_at,
+ :description, :name))
+ end
+ end
+end
diff --git a/spec/serializers/feature_flag_summary_entity_spec.rb b/spec/serializers/feature_flag_summary_entity_spec.rb
new file mode 100644
index 00000000000..385a9deb2d7
--- /dev/null
+++ b/spec/serializers/feature_flag_summary_entity_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagSummaryEntity do
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+ let(:project) { create(:project) }
+ let(:request) { double('request', current_user: user) }
+ let(:user) { create(:user) }
+ let(:entity) { described_class.new(project, request: request) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ subject { entity.as_json }
+
+ it 'has summary information' do
+ expect(subject).to include(:count)
+ end
+end
diff --git a/spec/serializers/feature_flag_summary_serializer_spec.rb b/spec/serializers/feature_flag_summary_serializer_spec.rb
new file mode 100644
index 00000000000..79cef6765f7
--- /dev/null
+++ b/spec/serializers/feature_flag_summary_serializer_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagSummarySerializer do
+ let(:serializer) { described_class.new(project: project, current_user: user) }
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let!(:feature_flags) { create(:operations_feature_flag, project: project) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#represent' do
+ subject { serializer.represent(project) }
+
+ it 'has summary information' do
+ expect(subject).to include(:count)
+ end
+ end
+end
diff --git a/spec/serializers/feature_flags_client_serializer_spec.rb b/spec/serializers/feature_flags_client_serializer_spec.rb
new file mode 100644
index 00000000000..3746142a3f1
--- /dev/null
+++ b/spec/serializers/feature_flags_client_serializer_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlagsClientSerializer do
+ let(:project) { create(:project) }
+ let(:feature_flags_client) { project.create_operations_feature_flags_client! }
+ let(:serializer) { described_class.new }
+
+ describe '#represent_token' do
+ subject { serializer.represent_token(feature_flags_client).to_json }
+
+ it 'includes feature flags client token' do
+ expect(subject).to match_schema('feature_flags_client_token')
+ end
+ end
+end
diff --git a/spec/serializers/group_group_link_entity_spec.rb b/spec/serializers/group_group_link_entity_spec.rb
index 8384563e3e6..9affe4af381 100644
--- a/spec/serializers/group_group_link_entity_spec.rb
+++ b/spec/serializers/group_group_link_entity_spec.rb
@@ -5,9 +5,27 @@ require 'spec_helper'
RSpec.describe GroupGroupLinkEntity do
include_context 'group_group_link'
- subject(:json) { described_class.new(group_group_link).to_json }
+ let_it_be(:current_user) { create(:user) }
+ let(:entity) { described_class.new(group_group_link) }
+
+ before do
+ allow(entity).to receive(:current_user).and_return(current_user)
+ end
it 'matches json schema' do
- expect(json).to match_schema('entities/group_group_link')
+ expect(entity.to_json).to match_schema('entities/group_group_link')
+ end
+
+ context 'a user with :admin_group_member permissions' do
+ before do
+ allow(entity).to receive(:can?).with(current_user, :admin_group_member, shared_group).and_return(true)
+ end
+
+ it 'sets `can_update` and `can_remove` to `true`' do
+ json = entity.as_json
+
+ expect(json[:can_update]).to be true
+ expect(json[:can_remove]).to be true
+ end
end
end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 51564de6041..031dc729a79 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -3,12 +3,11 @@
require 'spec_helper'
RSpec.describe MergeRequestPollCachedWidgetEntity do
- include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
- let(:project) { create :project, :repository }
- let(:resource) { create(:merge_request, source_project: project, target_project: project) }
- let(:user) { create(:user) }
+ let_it_be(:project, refind: true) { create :project, :repository }
+ let_it_be(:resource, refind: true) { create(:merge_request, source_project: project, target_project: project) }
+ let_it_be(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
@@ -174,8 +173,6 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
end
context 'when auto merge is not enabled' do
- let(:resource) { create(:merge_request) }
-
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_falsy
end
@@ -215,15 +212,55 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:commits_without_merge_commits].size).to eq(12)
end
end
+ end
- context 'when merge request is not mergeable' do
- before do
- allow(resource).to receive(:mergeable?).and_return(false)
+ describe 'pipeline' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
+
+ before do
+ allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
+ allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(can_access)
+ end
+
+ context 'when user has access to pipelines' do
+ let(:can_access) { true }
+
+ context 'when is up to date' do
+ let(:req) { double('request', current_user: user, project: project) }
+
+ it 'returns pipeline' do
+ pipeline_payload =
+ MergeRequests::PipelineEntity
+ .represent(pipeline, request: req)
+ .as_json
+
+ expect(subject[:pipeline]).to eq(pipeline_payload)
+ end
+
+ context 'when merge_request_cached_pipeline_serializer is disabled' do
+ it 'does not return pipeline' do
+ stub_feature_flags(merge_request_cached_pipeline_serializer: false)
+
+ expect(subject[:pipeline]).to be_nil
+ end
+ end
+ end
+
+ context 'when user does not have access to pipelines' do
+ let(:can_access) { false }
+ let(:req) { double('request', current_user: user, project: project) }
+
+ it 'does not have pipeline' do
+ expect(subject[:pipeline]).to eq(nil)
+ end
end
- it 'does not have default_squash_commit_message and commits_without_merge_commits' do
- expect(subject[:default_squash_commit_message]).to eq(nil)
- expect(subject[:commits_without_merge_commits]).to eq(nil)
+ context 'when is not up to date' do
+ it 'returns nil' do
+ pipeline.update!(sha: "not up to date")
+
+ expect(subject[:pipeline]).to eq(nil)
+ end
end
end
end
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 161940dd01a..1e5a8915da0 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -44,20 +44,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
- context 'when merge_request_short_pipeline_serializer is disabled' do
- it 'returns detailed info about pipeline' do
- stub_feature_flags(merge_request_short_pipeline_serializer: false)
-
- pipeline.reload
- pipeline_payload =
- PipelineDetailsEntity
- .represent(pipeline, request: request)
- .as_json
-
- expect(subject[:merge_pipeline]).to eq(pipeline_payload)
- end
- end
-
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
@@ -236,21 +222,16 @@ RSpec.describe MergeRequestPollWidgetEntity do
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
- it 'returns pipeline' do
- pipeline_payload =
- MergeRequests::PipelineEntity
- .represent(pipeline, request: req)
- .as_json
-
- expect(subject[:pipeline]).to eq(pipeline_payload)
+ it 'does not return pipeline' do
+ expect(subject[:pipeline]).to be_nil
end
- context 'when merge_request_short_pipeline_serializer is disabled' do
+ context 'when merge_request_cached_pipeline_serializer is disabled' do
it 'returns detailed info about pipeline' do
- stub_feature_flags(merge_request_short_pipeline_serializer: false)
+ stub_feature_flags(merge_request_cached_pipeline_serializer: false)
pipeline_payload =
- PipelineDetailsEntity
+ MergeRequests::PipelineEntity
.represent(pipeline, request: req)
.as_json
@@ -276,10 +257,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
let(:result) { false }
let(:req) { double('request', current_user: user, project: project) }
- it 'does not have pipeline' do
- expect(subject[:pipeline]).to eq(nil)
- end
-
it 'does not return ci_status' do
expect(subject[:ci_status]).to eq(nil)
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index 1432c4499ae..a1017d05bc3 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -272,7 +272,7 @@ RSpec.describe MergeRequestWidgetEntity do
describe 'user callouts' do
context 'when suggest pipeline feature is enabled' do
before do
- stub_feature_flags(suggest_pipeline: true)
+ stub_experiment(suggest_pipeline: true)
end
it 'provides a valid path value for user callout path' do
@@ -308,7 +308,7 @@ RSpec.describe MergeRequestWidgetEntity do
context 'when suggest pipeline feature is not enabled' do
before do
- stub_feature_flags(suggest_pipeline: false)
+ stub_experiment(suggest_pipeline: false)
end
it 'provides no valid value for user callout path' do
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index a2c58baed55..821ed34d3ec 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -31,14 +31,4 @@ RSpec.describe PaginatedDiffEntity do
total_pages: 7
)
end
-
- context 'when code_navigation feature flag is disabled' do
- it 'does not execute Gitlab::CodeNavigationPath' do
- stub_feature_flags(code_navigation: false)
-
- expect(Gitlab::CodeNavigationPath).not_to receive(:new)
-
- subject
- end
- end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index b42a4f6ad3f..e0f6ab68034 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 43 : 40
+ expected_queries = Gitlab.ee? ? 39 : 36
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -176,7 +176,7 @@ RSpec.describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
- expected_queries = Gitlab.ee? ? 49 : 46
+ expected_queries = Gitlab.ee? ? 42 : 39
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
@@ -199,11 +199,10 @@ RSpec.describe PipelineSerializer do
it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject }
- # 99 queries by default + 2 related to preloading
- # :source_pipeline and :source_job
# Existing numbers are high and require performance optimization
+ # Ongoing issue:
# https://gitlab.com/gitlab-org/gitlab/-/issues/225156
- expected_queries = Gitlab.ee? ? 95 : 86
+ expected_queries = Gitlab.ee? ? 85 : 76
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
diff --git a/spec/serializers/test_case_entity_spec.rb b/spec/serializers/test_case_entity_spec.rb
index 32e9562f4c1..45e63e3feec 100644
--- a/spec/serializers/test_case_entity_spec.rb
+++ b/spec/serializers/test_case_entity_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe TestCaseEntity do
expect(subject[:status]).to eq('success')
expect(subject[:name]).to eq('Test#sum when a is 1 and b is 3 returns summary')
expect(subject[:classname]).to eq('spec.test_spec')
+ expect(subject[:file]).to eq('./spec/test_spec.rb')
expect(subject[:execution_time]).to eq(1.11)
end
end
@@ -30,6 +31,7 @@ RSpec.describe TestCaseEntity do
expect(subject[:status]).to eq('failed')
expect(subject[:name]).to eq('Test#sum when a is 1 and b is 3 returns summary')
expect(subject[:classname]).to eq('spec.test_spec')
+ expect(subject[:file]).to eq('./spec/test_spec.rb')
expect(subject[:execution_time]).to eq(2.22)
end
end
diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/admin/propagate_integration_service_spec.rb
index 49d974b7154..5dfe39aebbc 100644
--- a/spec/services/admin/propagate_integration_service_spec.rb
+++ b/spec/services/admin/propagate_integration_service_spec.rb
@@ -10,9 +10,7 @@ RSpec.describe Admin::PropagateIntegrationService do
stub_jira_service_test
end
- let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance created_at updated_at default] }
- let!(:project) { create(:project) }
- let!(:group) { create(:group) }
+ let_it_be(:project) { create(:project) }
let!(:instance_integration) do
JiraService.create!(
instance: true,
@@ -39,7 +37,7 @@ RSpec.describe Admin::PropagateIntegrationService do
let!(:not_inherited_integration) do
JiraService.create!(
- project: create(:project),
+ project: project,
inherit_from_id: nil,
instance: false,
active: true,
@@ -52,7 +50,7 @@ RSpec.describe Admin::PropagateIntegrationService do
let!(:different_type_inherited_integration) do
BambooService.create!(
- project: create(:project),
+ project: project,
inherit_from_id: instance_integration.id,
instance: false,
active: true,
@@ -64,75 +62,37 @@ RSpec.describe Admin::PropagateIntegrationService do
)
end
- shared_examples 'inherits settings from integration' do
- it 'updates the inherited integrations' do
- described_class.propagate(instance_integration)
+ context 'with inherited integration' do
+ let(:integration) { inherited_integration }
- expect(integration.reload.inherit_from_id).to eq(instance_integration.id)
- expect(integration.attributes.except(*excluded_attributes))
- .to eq(instance_integration.attributes.except(*excluded_attributes))
- end
+ it 'calls to PropagateIntegrationInheritWorker' do
+ expect(PropagateIntegrationInheritWorker).to receive(:perform_async)
+ .with(instance_integration.id, inherited_integration.id, inherited_integration.id)
- context 'integration with data fields' do
- let(:excluded_attributes) { %w[id service_id created_at updated_at] }
-
- it 'updates the data fields from inherited integrations' do
- described_class.propagate(instance_integration)
-
- expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
- .to eq(instance_integration.data_fields.attributes.except(*excluded_attributes))
- end
- end
- end
-
- shared_examples 'does not inherit settings from integration' do
- it 'does not update the not inherited integrations' do
described_class.propagate(instance_integration)
-
- expect(integration.reload.attributes.except(*excluded_attributes))
- .not_to eq(instance_integration.attributes.except(*excluded_attributes))
end
end
- context 'update only inherited integrations' do
- it_behaves_like 'inherits settings from integration' do
- let(:integration) { inherited_integration }
- end
-
- it_behaves_like 'does not inherit settings from integration' do
- let(:integration) { not_inherited_integration }
- end
-
- it_behaves_like 'does not inherit settings from integration' do
- let(:integration) { different_type_inherited_integration }
- end
+ context 'with a project without integration' do
+ let!(:another_project) { create(:project) }
- it_behaves_like 'inherits settings from integration' do
- let(:integration) { project.jira_service }
- end
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(instance_integration.id, another_project.id, another_project.id)
- it_behaves_like 'inherits settings from integration' do
- let(:integration) { Service.find_by(group_id: group.id) }
+ described_class.propagate(instance_integration)
end
end
- it 'updates project#has_external_issue_tracker for issue tracker services' do
- described_class.propagate(instance_integration)
+ context 'with a group without integration' do
+ let!(:group) { create(:group) }
- expect(project.reload.has_external_issue_tracker).to eq(true)
- end
+ it 'calls to PropagateIntegrationGroupWorker' do
+ expect(PropagateIntegrationGroupWorker).to receive(:perform_async)
+ .with(instance_integration.id, group.id, group.id)
- it 'updates project#has_external_wiki for external wiki services' do
- instance_integration = ExternalWikiService.create!(
- instance: true,
- active: true,
- push_events: false,
- external_wiki_url: 'http://external-wiki-url.com'
- )
-
- described_class.propagate(instance_integration)
-
- expect(project.reload.has_external_wiki).to eq(true)
+ described_class.propagate(instance_integration)
+ end
end
end
end
diff --git a/spec/services/admin/propagate_service_template_spec.rb b/spec/services/admin/propagate_service_template_spec.rb
index 15654653095..d95d31ceaea 100644
--- a/spec/services/admin/propagate_service_template_spec.rb
+++ b/spec/services/admin/propagate_service_template_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Admin::PropagateServiceTemplate do
describe '.propagate' do
+ let_it_be(:project) { create(:project) }
let!(:service_template) do
PushoverService.create!(
template: true,
@@ -19,124 +20,40 @@ RSpec.describe Admin::PropagateServiceTemplate do
)
end
- let!(:project) { create(:project) }
- let(:excluded_attributes) { %w[id project_id template created_at updated_at default] }
-
- it 'creates services for projects' do
- expect(project.pushover_service).to be_nil
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(service_template.id, project.id, project.id)
described_class.propagate(service_template)
-
- expect(project.reload.pushover_service).to be_present
- end
-
- it 'creates services for a project that has another service' do
- BambooService.create!(
- active: true,
- project: project,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: 'password',
- build_key: 'build'
- }
- )
-
- expect(project.pushover_service).to be_nil
-
- described_class.propagate(service_template)
-
- expect(project.reload.pushover_service).to be_present
- end
-
- it 'does not create the service if it exists already' do
- other_service = BambooService.create!(
- template: true,
- active: true,
- properties: {
- bamboo_url: 'http://gitlab.com',
- username: 'mic',
- password: 'password',
- build_key: 'build'
- }
- )
-
- Service.build_from_integration(project.id, service_template).save!
- Service.build_from_integration(project.id, other_service).save!
-
- expect { described_class.propagate(service_template) }
- .not_to change { Service.count }
end
- it 'creates the service containing the template attributes' do
- described_class.propagate(service_template)
-
- expect(project.pushover_service.properties).to eq(service_template.properties)
-
- expect(project.pushover_service.attributes.except(*excluded_attributes))
- .to eq(service_template.attributes.except(*excluded_attributes))
- end
-
- context 'service with data fields' do
- include JiraServiceHelper
-
- let(:service_template) do
- stub_jira_service_test
-
- JiraService.create!(
- template: true,
+ context 'with a project that has another service' do
+ before do
+ BambooService.create!(
active: true,
- push_events: false,
- url: 'http://jira.instance.com',
- username: 'user',
- password: 'secret'
+ project: project,
+ properties: {
+ bamboo_url: 'http://gitlab.com',
+ username: 'mic',
+ password: 'password',
+ build_key: 'build'
+ }
)
end
- it 'creates the service containing the template attributes' do
- described_class.propagate(service_template)
-
- expect(project.jira_service.attributes.except(*excluded_attributes))
- .to eq(service_template.attributes.except(*excluded_attributes))
-
- excluded_attributes = %w[id service_id created_at updated_at]
- expect(project.jira_service.data_fields.attributes.except(*excluded_attributes))
- .to eq(service_template.data_fields.attributes.except(*excluded_attributes))
- end
- end
-
- describe 'bulk update', :use_sql_query_cache do
- let(:project_total) { 5 }
-
- before do
- stub_const('Admin::PropagateServiceTemplate::BATCH_SIZE', 3)
-
- project_total.times { create(:project) }
+ it 'calls to PropagateIntegrationProjectWorker' do
+ expect(PropagateIntegrationProjectWorker).to receive(:perform_async)
+ .with(service_template.id, project.id, project.id)
described_class.propagate(service_template)
end
-
- it 'creates services for all projects' do
- expect(Service.all.reload.count).to eq(project_total + 2)
- end
- end
-
- describe 'external tracker' do
- it 'updates the project external tracker' do
- service_template.update!(category: 'issue_tracker')
-
- expect { described_class.propagate(service_template) }
- .to change { project.reload.has_external_issue_tracker }.to(true)
- end
end
- describe 'external wiki' do
- it 'updates the project external tracker' do
- service_template.update!(type: 'ExternalWikiService')
+ it 'does not create the service if it exists already' do
+ Service.build_from_integration(service_template, project_id: project.id).save!
- expect { described_class.propagate(service_template) }
- .to change { project.reload.has_external_wiki }.to(true)
- end
+ expect { described_class.propagate(service_template) }
+ .not_to change { Service.count }
end
end
end
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index b14cc65506a..ad4ab26c198 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -148,28 +148,20 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
expect { execute }.to change { alert.reload.resolved? }.to(true)
end
- [true, false].each do |state_tracking_enabled|
- context 'existing issue' do
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
- end
-
- let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint) }
-
- it 'closes the issue' do
- issue = alert.issue
-
- expect { execute }
- .to change { issue.reload.state }
- .from('opened')
- .to('closed')
- end
-
- if state_tracking_enabled
- specify { expect { execute }.to change(ResourceStateEvent, :count).by(1) }
- else
- specify { expect { execute }.to change(Note, :count).by(1) }
- end
+ context 'existing issue' do
+ let!(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint) }
+
+ it 'closes the issue' do
+ issue = alert.issue
+
+ expect { execute }
+ .to change { issue.reload.state }
+ .from('opened')
+ .to('closed')
+ end
+
+ it 'creates a resource state event' do
+ expect { execute }.to change(ResourceStateEvent, :count).by(1)
end
end
end
diff --git a/spec/services/audit_event_service_spec.rb b/spec/services/audit_event_service_spec.rb
index 93de2a23edc..3317fcf8444 100644
--- a/spec/services/audit_event_service_spec.rb
+++ b/spec/services/audit_event_service_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe AuditEventService do
let(:audit_service) { described_class.new(user, user, with: 'standard') }
it 'creates an authentication event' do
- expect(AuthenticationEvent).to receive(:create).with(
+ expect(AuthenticationEvent).to receive(:new).with(
user: user,
user_name: user.name,
ip_address: user.current_sign_in_ip,
@@ -67,6 +67,17 @@ RSpec.describe AuditEventService do
audit_service.for_authentication.security_event
end
+
+ it 'tracks exceptions when the event cannot be created' do
+ allow(user).to receive_messages(current_sign_in_ip: 'invalid IP')
+
+ expect(Gitlab::ErrorTracking).to(
+ receive(:track_exception)
+ .with(ActiveRecord::RecordInvalid, audit_event_type: 'AuthenticationEvent').and_call_original
+ )
+
+ audit_service.for_authentication.security_event
+ end
end
end
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
new file mode 100644
index 00000000000..5d896f78b35
--- /dev/null
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkCreateIntegrationService do
+ include JiraServiceHelper
+
+ before do
+ stub_jira_service_test
+ end
+
+ let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
+ let!(:instance_integration) { create(:jira_service, :instance) }
+ let!(:template_integration) { create(:jira_service, :template) }
+
+ shared_examples 'creates integration from batch ids' do
+ it 'creates an integration with attributes matching the given integration' do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.attributes.except(*excluded_attributes))
+ .to eq(integration.attributes.except(*excluded_attributes))
+ end
+
+ context 'integration with data fields' do
+ let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+
+ it 'copies the data fields from the given integration' do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.reload.data_fields.attributes.except(*excluded_attributes))
+ .to eq(integration.data_fields.attributes.except(*excluded_attributes))
+ end
+ end
+ end
+
+ shared_examples 'updates inherit_from_id' do
+ it 'updates inherit_from_id attributes' do
+ described_class.new(integration, batch, association).execute
+
+ expect(created_integration.reload.inherit_from_id).to eq(integration.id)
+ end
+ end
+
+ shared_examples 'runs project callbacks' do
+ it 'updates project#has_external_issue_tracker for issue tracker services' do
+ described_class.new(integration, batch, association).execute
+
+ expect(project.reload.has_external_issue_tracker).to eq(true)
+ end
+
+ context 'with an external wiki integration' do
+ let(:integration) do
+ ExternalWikiService.create!(
+ instance: true,
+ active: true,
+ push_events: false,
+ external_wiki_url: 'http://external-wiki-url.com'
+ )
+ end
+
+ it 'updates project#has_external_wiki for external wiki services' do
+ described_class.new(integration, batch, association).execute
+
+ expect(project.reload.has_external_wiki).to eq(true)
+ end
+ end
+ end
+
+ context 'with an instance-level integration' do
+ let(:integration) { instance_integration }
+
+ context 'with a project association' do
+ let!(:project) { create(:project) }
+ let(:created_integration) { project.jira_service }
+ let(:batch) { Project.all }
+ let(:association) { 'project' }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'updates inherit_from_id'
+ it_behaves_like 'runs project callbacks'
+ end
+
+ context 'with a group association' do
+ let!(:group) { create(:group) }
+ let(:created_integration) { Service.find_by(group: group) }
+ let(:batch) { Group.all }
+ let(:association) { 'group' }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'updates inherit_from_id'
+ end
+ end
+
+ context 'with a template integration' do
+ let(:integration) { template_integration }
+
+ context 'with a project association' do
+ let!(:project) { create(:project) }
+ let(:created_integration) { project.jira_service }
+ let(:batch) { Project.all }
+ let(:association) { 'project' }
+
+ it_behaves_like 'creates integration from batch ids'
+ it_behaves_like 'runs project callbacks'
+ end
+ end
+end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
new file mode 100644
index 00000000000..2f0bfd31600
--- /dev/null
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkUpdateIntegrationService do
+ include JiraServiceHelper
+
+ before do
+ stub_jira_service_test
+ end
+
+ let(:excluded_attributes) { %w[id project_id group_id inherit_from_id instance template created_at updated_at] }
+ let!(:instance_integration) do
+ JiraService.create!(
+ instance: true,
+ active: true,
+ push_events: true,
+ url: 'http://update-jira.instance.com',
+ username: 'user',
+ password: 'secret'
+ )
+ end
+
+ let!(:integration) do
+ JiraService.create!(
+ project: create(:project),
+ inherit_from_id: instance_integration.id,
+ instance: false,
+ active: true,
+ push_events: false,
+ url: 'http://jira.instance.com',
+ username: 'user',
+ password: 'secret'
+ )
+ end
+
+ context 'with inherited integration' do
+ it 'updates the integration' do
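+ # Pass the batch of services that inherit from the instance-level integration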
+ described_class.new(instance_integration, Service.inherit_from_id(instance_integration.id)).execute
+
+ expect(integration.reload.inherit_from_id).to eq(instance_integration.id)
+ expect(integration.attributes.except(*excluded_attributes))
+ .to eq(instance_integration.attributes.except(*excluded_attributes))
+ end
+
+ context 'with integration with data fields' do
+ let(:excluded_attributes) { %w[id service_id created_at updated_at] }
+
+ it 'updates the data fields from the integration' do
+ described_class.new(instance_integration, Service.inherit_from_id(instance_integration.id)).execute
+
+ expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
+ .to eq(instance_integration.data_fields.attributes.except(*excluded_attributes))
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index e0893ed6de3..c28c3449485 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -731,30 +731,11 @@ RSpec.describe Ci::CreatePipelineService do
.and_call_original
end
- context 'when ci_pipeline_rewind_iid is enabled' do
- before do
- stub_feature_flags(ci_pipeline_rewind_iid: true)
- end
-
- it 'rewinds iid' do
- result = execute_service
-
- expect(result).not_to be_persisted
- expect(internal_id.last_value).to eq(0)
- end
- end
-
- context 'when ci_pipeline_rewind_iid is disabled' do
- before do
- stub_feature_flags(ci_pipeline_rewind_iid: false)
- end
-
- it 'does not rewind iid' do
- result = execute_service
+ it 'rewinds iid' do
+ result = execute_service
- expect(result).not_to be_persisted
- expect(internal_id.last_value).to eq(1)
- end
+ expect(result).not_to be_persisted
+ expect(internal_id.last_value).to eq(0)
end
end
end
diff --git a/spec/services/ci/expire_pipeline_cache_service_spec.rb b/spec/services/ci/expire_pipeline_cache_service_spec.rb
index b5d664947de..8df5d0bc159 100644
--- a/spec/services/ci/expire_pipeline_cache_service_spec.rb
+++ b/spec/services/ci/expire_pipeline_cache_service_spec.rb
@@ -26,9 +26,11 @@ RSpec.describe Ci::ExpirePipelineCacheService do
project = merge_request.target_project
merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
+ merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"
allow_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch)
expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_pipelines_path)
+ expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(merge_request_widget_path)
subject.execute(merge_request.all_pipelines.last)
end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
index 77645298bc7..2936d6fae4d 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -43,12 +43,12 @@ RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
{
pipeline: pipeline.status,
stages: pipeline.stages.pluck(:name, :status).to_h,
- jobs: pipeline.statuses.latest.pluck(:name, :status).to_h
+ jobs: pipeline.latest_statuses.pluck(:name, :status).to_h
}
end
def event_on_jobs(event, job_names)
- statuses = pipeline.statuses.latest.by_name(job_names).to_a
+ statuses = pipeline.latest_statuses.by_name(job_names).to_a
expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
statuses.each { |status| status.public_send("#{event}!") }
diff --git a/spec/services/ci/pipelines/create_artifact_service_spec.rb b/spec/services/ci/pipelines/create_artifact_service_spec.rb
index d5e9cf83a6d..6f177889ed3 100644
--- a/spec/services/ci/pipelines/create_artifact_service_spec.rb
+++ b/spec/services/ci/pipelines/create_artifact_service_spec.rb
@@ -35,16 +35,6 @@ RSpec.describe ::Ci::Pipelines::CreateArtifactService do
end
end
- context 'when feature is disabled' do
- it 'does not create a pipeline artifact' do
- stub_feature_flags(coverage_report_view: false)
-
- subject
-
- expect(Ci::PipelineArtifact.count).to eq(0)
- end
- end
-
context 'when pipeline artifact has already been created' do
it 'does not raise an error and does not persist the same artifact twice' do
expect { 2.times { described_class.new.execute(pipeline) } }.not_to raise_error(ActiveRecord::RecordNotUnique)
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 51741440075..4c1e698d52d 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -154,7 +154,7 @@ RSpec.describe Ci::RetryBuildService do
describe '#execute' do
let(:new_build) do
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
service.execute(build)
end
end
@@ -257,7 +257,7 @@ RSpec.describe Ci::RetryBuildService do
describe '#reprocess' do
let(:new_build) do
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
service.reprocess!(build)
end
end
diff --git a/spec/services/ci/update_build_queue_service_spec.rb b/spec/services/ci/update_build_queue_service_spec.rb
index 0f4c0fa5ecb..ebccfdc5140 100644
--- a/spec/services/ci/update_build_queue_service_spec.rb
+++ b/spec/services/ci/update_build_queue_service_spec.rb
@@ -45,21 +45,7 @@ RSpec.describe Ci::UpdateBuildQueueService do
runner.update!(contacted_at: Ci::Runner.recent_queue_deadline)
end
- context 'when ci_update_queues_for_online_runners is enabled' do
- before do
- stub_feature_flags(ci_update_queues_for_online_runners: true)
- end
-
- it_behaves_like 'does not refresh runner'
- end
-
- context 'when ci_update_queues_for_online_runners is disabled' do
- before do
- stub_feature_flags(ci_update_queues_for_online_runners: false)
- end
-
- it_behaves_like 'refreshes runner'
- end
+ it_behaves_like 'does not refresh runner'
end
end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index f5ad732bf7e..aa1de368154 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -83,9 +83,26 @@ RSpec.describe Ci::UpdateBuildStateService do
{ checksum: 'crc32:12345678', state: 'failed', failure_reason: 'script_failure' }
end
+ context 'when build does not have associated trace chunks' do
+ it 'updates a build status' do
+ result = subject.execute
+
+ expect(build).to be_failed
+ expect(result.status).to eq 200
+ end
+
+ it 'does not increment invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+ end
+
context 'when build trace has been migrated' do
before do
- create(:ci_build_trace_chunk, :database_with_data, build: build)
+ create(:ci_build_trace_chunk, :persisted, build: build, initial_data: 'abcd')
end
it 'updates a build state' do
@@ -100,6 +117,12 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(result.status).to eq 200
end
+ it 'does not set a backoff value' do
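+ # No backoff is expected because the trace has already been migrated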
+ result = subject.execute
+
+ expect(result.backoff).to be_nil
+ end
+
it 'increments trace finalized operation metric' do
execute_with_stubbed_metrics!
@@ -107,6 +130,48 @@ RSpec.describe Ci::UpdateBuildStateService do
.to have_received(:increment_trace_operation)
.with(operation: :finalized)
end
+
+ context 'when trace checksum is not valid' do
+ it 'increments invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+ end
+
+ context 'when trace checksum is valid' do
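+ # crc32:ed82cd11 is the CRC32 of the persisted trace data 'abcd', so the checksum matches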
+ let(:params) { { checksum: 'crc32:ed82cd11', state: 'success' } }
+
+ it 'does not increment invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+ end
+
+ context 'when a build trace lock cannot be acquired' do
+ it 'accepts a state update request' do
+ build.trace.lock do
+ result = subject.execute
+
+ expect(result.status).to eq 202
+ end
+ end
+
+ it 'increments locked trace metric' do
+ build.trace.lock do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :locked)
+ end
+ end
+ end
end
context 'when build trace has not been migrated yet' do
@@ -126,6 +191,12 @@ RSpec.describe Ci::UpdateBuildStateService do
expect(result.status).to eq 202
end
+ it 'sets a request backoff value' do
+ result = subject.execute
+
+ expect(result.backoff.to_i).to be > 0
+ end
+
it 'schedules live chunks for migration' do
expect(Ci::BuildTraceChunkFlushWorker)
.to receive(:perform_async)
@@ -134,14 +205,6 @@ RSpec.describe Ci::UpdateBuildStateService do
subject.execute
end
- it 'increments trace accepted operation metric' do
- execute_with_stubbed_metrics!
-
- expect(metrics)
- .to have_received(:increment_trace_operation)
- .with(operation: :accepted)
- end
-
it 'creates a pending state record' do
subject.execute
@@ -153,6 +216,22 @@ RSpec.describe Ci::UpdateBuildStateService do
end
end
+ it 'increments trace accepted operation metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .to have_received(:increment_trace_operation)
+ .with(operation: :accepted)
+ end
+
+ it 'does not increment invalid trace metric' do
+ execute_with_stubbed_metrics!
+
+ expect(metrics)
+ .not_to have_received(:increment_trace_operation)
+ .with(operation: :invalid)
+ end
+
context 'when build pending state is outdated' do
before do
build.create_pending_state(
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
index be362dc6e23..a20cf90a770 100644
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
@@ -84,11 +84,9 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
before do
stub_cloud_platform_get_zone_cluster(
provider.gcp_project_id, provider.zone, cluster.name,
- {
- endpoint: endpoint,
- username: username,
- password: password
- }
+ endpoint: endpoint,
+ username: username,
+ password: password
)
stub_kubeclient_discover(api_url)
@@ -101,11 +99,9 @@ RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: secret_name,
- token: Base64.encode64(token),
- namespace: 'default'
- }
+ metadata_name: secret_name,
+ token: Base64.encode64(token),
+ namespace: 'default'
)
stub_kubeclient_put_cluster_role_binding(api_url, 'gitlab-admin')
diff --git a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
index b4402aadc88..f26177a56d0 100644
--- a/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
+++ b/spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb
@@ -26,27 +26,21 @@ RSpec.describe Clusters::Kubernetes::ConfigureIstioIngressService, '#execute' do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: "#{namespace}-token",
- token: Base64.encode64('sample-token'),
- namespace: namespace
- }
+ metadata_name: "#{namespace}-token",
+ token: Base64.encode64('sample-token'),
+ namespace: namespace
)
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: 'istio-ingressgateway-ca-certs',
- namespace: 'istio-system'
- }
+ metadata_name: 'istio-ingressgateway-ca-certs',
+ namespace: 'istio-system'
)
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: 'istio-ingressgateway-certs',
- namespace: 'istio-system'
- }
+ metadata_name: 'istio-ingressgateway-certs',
+ namespace: 'istio-system'
)
stub_kubeclient_put_secret(api_url, 'istio-ingressgateway-ca-certs', namespace: 'istio-system')
diff --git a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
index ee10c59390e..7e3f1fdb379 100644
--- a/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb
@@ -41,11 +41,9 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateNamespaceService, '#execute'
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: "#{namespace}-token",
- token: Base64.encode64('sample-token'),
- namespace: namespace
- }
+ metadata_name: "#{namespace}-token",
+ token: Base64.encode64('sample-token'),
+ namespace: namespace
)
end
diff --git a/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb b/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb
index c4daae9dbf0..7a283a974d2 100644
--- a/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb
+++ b/spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb
@@ -31,11 +31,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: token
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: token
)
end
@@ -54,11 +52,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret_not_found_then_found(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: token
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: token
)
end
@@ -96,11 +92,9 @@ RSpec.describe Clusters::Kubernetes::FetchKubernetesTokenService do
before do
stub_kubeclient_get_secret(
api_url,
- {
- metadata_name: service_account_token_name,
- namespace: namespace,
- token: nil
- }
+ metadata_name: service_account_token_name,
+ namespace: namespace,
+ token: nil
)
end
diff --git a/spec/services/deployments/after_create_service_spec.rb b/spec/services/deployments/after_create_service_spec.rb
index 6cdb4c88191..ee9b008c2f8 100644
--- a/spec/services/deployments/after_create_service_spec.rb
+++ b/spec/services/deployments/after_create_service_spec.rb
@@ -269,14 +269,14 @@ RSpec.describe Deployments::AfterCreateService do
it "does not overwrite the older 'first_deployed_to_production_at' time" do
# Previous deploy
time = 5.minutes.from_now
- Timecop.freeze(time) { service.execute }
+ travel_to(time) { service.execute }
expect(merge_request.reload.metrics.merged_at).to be < merge_request.reload.metrics.first_deployed_to_production_at
previous_time = merge_request.reload.metrics.first_deployed_to_production_at
# Current deploy
- Timecop.freeze(time + 12.hours) { service.execute }
+ travel_to(time + 12.hours) { service.execute }
expect(merge_request.reload.metrics.first_deployed_to_production_at).to eq(previous_time)
end
diff --git a/spec/services/design_management/copy_design_collection/copy_service_spec.rb b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
new file mode 100644
index 00000000000..e93e5f13fea
--- /dev/null
+++ b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
@@ -0,0 +1,259 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitlab_redis_shared_state do
+ include DesignManagementTestHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue, refind: true) { create(:issue, project: project) }
+ let(:target_issue) { create(:issue) }
+
+ subject { described_class.new(project, user, issue: issue, target_issue: target_issue).execute }
+
+ before do
+ enable_design_management
+ end
+
+ shared_examples 'service error' do |message:|
+ it 'returns an error response', :aggregate_failures do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq(message)
+ end
+ end
+
+ shared_examples 'service success' do
+ it 'returns a success response', :aggregate_failures do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_success
+ end
+ end
+
+ include_examples 'service error', message: 'User cannot copy design collection to issue'
+
+ context 'when user has permission to read the design collection' do
+ before_all do
+ project.add_reporter(user)
+ end
+
+ include_examples 'service error', message: 'User cannot copy design collection to issue'
+
+ context 'when the user also has permission to admin the target issue' do
+ let(:target_repository) { target_issue.project.design_repository }
+
+ before do
+ target_issue.project.add_reporter(user)
+ end
+
+ include_examples 'service error', message: 'Target design collection must first be queued'
+
+ context 'when the target design collection has been queued' do
+ before do
+ target_issue.design_collection.start_copy!
+ end
+
+ include_examples 'service error', message: 'Design collection has no designs'
+
+ context 'when design collection has designs' do
+ let_it_be(:designs) do
+ create_list(:design, 3, :with_lfs_file, :with_relative_position, issue: issue, project: project)
+ end
+
+ context 'when target issue already has designs' do
+ before do
+ create(:design, issue: target_issue, project: target_issue.project)
+ end
+
+ include_examples 'service error', message: 'Target design collection already has designs'
+ end
+
+ include_examples 'service success'
+
+ it 'creates a design repository for the target project' do
+ expect { subject }.to change { target_repository.exists? }.from(false).to(true)
+ end
+
+ context 'when the target project already has a design repository' do
+ before do
+ target_repository.create_if_not_exists
+ end
+
+ include_examples 'service success'
+ end
+
+ it 'copies the designs correctly', :aggregate_failures do
+ expect { subject }.to change { target_issue.designs.count }.by(3)
+
+ old_designs = issue.designs.ordered
+ new_designs = target_issue.designs.ordered
+
+ new_designs.zip(old_designs).each do |new_design, old_design|
+ expect(new_design).to have_attributes(
+ filename: old_design.filename,
+ relative_position: old_design.relative_position,
+ issue: target_issue,
+ project: target_issue.project
+ )
+ end
+ end
+
+ it 'copies the design versions correctly', :aggregate_failures do
+ expect { subject }.to change { target_issue.design_versions.count }.by(3)
+
+ old_versions = issue.design_versions.ordered
+ new_versions = target_issue.design_versions.ordered
+
+ new_versions.zip(old_versions).each do |new_version, old_version|
+ expect(new_version).to have_attributes(
+ created_at: old_version.created_at,
+ author_id: old_version.author_id
+ )
+ expect(new_version.designs.pluck(:filename)).to eq(old_version.designs.pluck(:filename))
+ expect(new_version.actions.pluck(:event)).to eq(old_version.actions.pluck(:event))
+ end
+ end
+
+ it 'copies the design actions correctly', :aggregate_failures do
+ expect { subject }.to change { DesignManagement::Action.count }.by(3)
+
+ old_actions = issue.design_versions.ordered.flat_map(&:actions)
+ new_actions = target_issue.design_versions.ordered.flat_map(&:actions)
+
+ new_actions.zip(old_actions).each do |new_action, old_action|
+ # One way to check that the versions linked to the actions
+ # are correct is to compare design filenames, as the SHAs change.
+ new_design_filenames = new_action.version.designs.ordered.pluck(:filename)
+ old_design_filenames = old_action.version.designs.ordered.pluck(:filename)
+
+ expect(new_design_filenames).to eq(old_design_filenames)
+ expect(new_action.event).to eq(old_action.event)
+ expect(new_action.design.filename).to eq(old_action.design.filename)
+ end
+ end
+
+ it 'copies design notes correctly', :aggregate_failures, :sidekiq_inline do
+ old_notes = [
+ create(:diff_note_on_design, note: 'first note', noteable: designs.first, project: project, author: create(:user)),
+ create(:diff_note_on_design, note: 'second note', noteable: designs.first, project: project, author: create(:user))
+ ]
+ matchers = old_notes.map do |note|
+ have_attributes(
+ note.attributes.slice(
+ :type,
+ :author_id,
+ :note,
+ :position
+ )
+ )
+ end
+
+ expect { subject }.to change { Note.count }.by(2)
+
+ new_notes = target_issue.designs.first.notes.fresh
+
+ expect(new_notes).to match_array(matchers)
+ end
+
+ it 'links the LfsObjects' do
+ expect { subject }.to change { target_issue.project.lfs_objects.count }.by(3)
+ end
+
+ it 'copies the Git repository data', :aggregate_failures do
+ subject
+
+ commit_shas = target_repository.commits('master', limit: 99).map(&:id)
+
+ expect(commit_shas).to include(*target_issue.design_versions.ordered.pluck(:sha))
+ end
+
+ it 'creates a master branch if none previously existed' do
+ expect { subject }.to change { target_repository.branch_names }.from([]).to(['master'])
+ end
+
+ it 'leaves the design collection in the correct copy state' do
+ subject
+
+ expect(target_issue.design_collection).to be_copy_ready
+ end
+
+ describe 'rollback' do
+ before do
+ # Ensure the very last step throws an error
+ expect_next_instance_of(described_class) do |service|
+ expect(service).to receive(:finalize!).and_raise
+ end
+ end
+
+ include_examples 'service error', message: 'Designs were unable to be copied successfully'
+
+ it 'rolls back all PostgreSQL data created', :aggregate_failures do
+ expect { subject }.not_to change {
+ [
+ DesignManagement::Design.count,
+ DesignManagement::Action.count,
+ DesignManagement::Version.count,
+ Note.count
+ ]
+ }
+
+ collections = [
+ target_issue.design_collection,
+ target_issue.designs,
+ target_issue.design_versions
+ ]
+
+ expect(collections).to all(be_empty)
+ end
+
+ it 'does not alter master branch', :aggregate_failures do
+ # Add some Git data to the target_repository, so we are testing
+ # that any original data remains
+ issue_2 = create(:issue, project: target_issue.project)
+ create(:design, :with_file, issue: issue_2, project: target_issue.project)
+
+ expect { subject }.not_to change {
+ target_repository.commits('master', limit: 10).size
+ }.from(1)
+ end
+
+ it 'sets the design collection copy state' do
+ subject
+
+ expect(target_issue.design_collection).to be_copy_error
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe 'Alert if schema changes', :aggregate_failures do
+ let_it_be(:config_file) { Rails.root.join('lib/gitlab/design_management/copy_design_collection_model_attributes.yml') }
+ let_it_be(:config) { YAML.load_file(config_file).symbolize_keys }
+
+ %w(Design Action Version).each do |model|
+ specify do
+ attributes = config["#{model.downcase}_attributes".to_sym] || []
+ ignored_attributes = config["ignore_#{model.downcase}_attributes".to_sym]
+
+ expect(attributes + ignored_attributes).to contain_exactly(
+ *DesignManagement.const_get(model, false).column_names
+ ), failure_message(model)
+ end
+ end
+
+ def failure_message(model)
+ <<-MSG
+ The schema of the `#{model}` model has changed.
+
+ `#{described_class.name}` refers to specific lists of attributes of `#{model}` to either
+ copy or ignore, so that we continue to copy designs correctly after schema changes.
+
+ Please update:
+ #{config_file}
+ to reflect the latest changes to `#{model}`. See that file for more information.
+ MSG
+ end
+ end
+end
diff --git a/spec/services/design_management/copy_design_collection/queue_service_spec.rb b/spec/services/design_management/copy_design_collection/queue_service_spec.rb
new file mode 100644
index 00000000000..2d9ea4633a0
--- /dev/null
+++ b/spec/services/design_management/copy_design_collection/queue_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe DesignManagement::CopyDesignCollection::QueueService, :clean_gitlab_redis_shared_state do
+ include DesignManagementTestHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:target_issue, refind: true) { create(:issue) }
+ let_it_be(:design) { create(:design, issue: issue, project: issue.project) }
+
+ subject { described_class.new(user, issue, target_issue).execute }
+
+ before do
+ enable_design_management
+ end
+
+ it 'returns an error if user does not have permission' do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq('User cannot copy designs to issue')
+ end
+
+ context 'when user has permission' do
+ before_all do
+ issue.project.add_reporter(user)
+ target_issue.project.add_reporter(user)
+ end
+
+ it 'returns an error if design collection copy_state is not queuable' do
+ target_issue.design_collection.start_copy!
+
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_error
+ expect(subject.message).to eq('Target design collection copy state must be `ready`')
+ end
+
+ it 'sets the design collection copy state' do
+ expect { subject }.to change { target_issue.design_collection.copy_state }.from('ready').to('in_progress')
+ end
+
+ it 'queues a DesignManagement::CopyDesignCollectionWorker' do
+ expect { subject }.to change(DesignManagement::CopyDesignCollectionWorker.jobs, :size).by(1)
+ end
+
+ it 'returns success' do
+ expect(subject).to be_kind_of(ServiceResponse)
+ expect(subject).to be_success
+ end
+ end
+end
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index ace63b6e59c..9b2866cab11 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe DesignManagement::DeleteDesignsService do
end
it 'informs the new-version-worker' do
- expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer)
+ expect(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer, false)
run_service
end
diff --git a/spec/services/design_management/generate_image_versions_service_spec.rb b/spec/services/design_management/generate_image_versions_service_spec.rb
index 631eec97e5a..749030af97d 100644
--- a/spec/services/design_management/generate_image_versions_service_spec.rb
+++ b/spec/services/design_management/generate_image_versions_service_spec.rb
@@ -52,25 +52,50 @@ RSpec.describe DesignManagement::GenerateImageVersionsService do
end
context 'when an error is encountered while generating the image versions' do
- before do
- expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
- expect(uploader).to receive(:cache!).and_raise(CarrierWave::DownloadError, 'foo')
+ context "CarrierWave::IntegrityError" do
+ before do
+ expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
+ expect(uploader).to receive(:cache!).and_raise(CarrierWave::IntegrityError, 'foo')
+ end
+ end
+
+ it 'logs the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(CarrierWave::IntegrityError),
+ project_id: project.id, version_id: version.id, design_id: version.designs.first.id
+ )
+
+ described_class.new(version).execute
end
- end
- it 'logs the error' do
- expect(Gitlab::AppLogger).to receive(:error).with('foo')
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with('foo')
- described_class.new(version).execute
+ described_class.new(version).execute
+ end
end
- it 'tracks the error' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
- instance_of(CarrierWave::DownloadError),
- project_id: project.id, version_id: version.id, design_id: version.designs.first.id
- )
+ context "CarrierWave::UploadError" do
+ before do
+ expect_next_instance_of(DesignManagement::DesignV432x230Uploader) do |uploader|
+ expect(uploader).to receive(:cache!).and_raise(CarrierWave::UploadError, 'foo')
+ end
+ end
- described_class.new(version).execute
+ it 'logs the error' do
+ expect(Gitlab::AppLogger).to receive(:error).with('foo')
+
+ described_class.new(version).execute
+ end
+
+ it 'tracks the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(CarrierWave::UploadError),
+ project_id: project.id, version_id: version.id, design_id: version.designs.first.id
+ )
+
+ described_class.new(version).execute
+ end
end
end
end
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index abba5de2c27..ec241daf3cd 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
allow(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).with(Integer).and_return(nil)
+ .to receive(:perform_async).with(Integer, false).and_return(nil)
end
def run_service(files_to_upload = nil)
@@ -128,6 +128,25 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(parellism)
end
+ context 'when the design collection is in the process of being copied', :clean_gitlab_redis_shared_state do
+ before do
+ issue.design_collection.start_copy!
+ end
+
+ it_behaves_like 'a service error'
+ end
+
+ context 'when the design collection has a copy error', :clean_gitlab_redis_shared_state do
+ before do
+ issue.design_collection.copy_state = 'error'
+ issue.design_collection.send(:set_stored_copy_state!)
+ end
+
+ it 'resets the copy state' do
+ expect { run_service }.to change { issue.design_collection.copy_state }.from('error').to('ready')
+ end
+ end
+
describe 'the response' do
it 'includes designs with the expected properties' do
updated_designs = response[:designs]
@@ -220,7 +239,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
counter = Gitlab::UsageDataCounters::DesignsCounter
expect(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).once.with(Integer).and_return(nil)
+ .to receive(:perform_async).once.with(Integer, false).and_return(nil)
expect { run_service }
.to change { Event.count }.by(2)
@@ -254,7 +273,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
design_repository.has_visible_content?
expect(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).once.with(Integer).and_return(nil)
+ .to receive(:perform_async).once.with(Integer, false).and_return(nil)
expect { service.execute }
.to change { issue.designs.count }.from(0).to(2)
@@ -271,6 +290,14 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(response[:message]).to match(/only \d+ files are allowed simultaneously/i)
end
end
+
+ context 'when uploading duplicate files' do
+ let(:files) { [rails_sample, dk_png, rails_sample] }
+
+ it 'returns the correct error' do
+ expect(response[:message]).to match('Duplicate filenames are not allowed!')
+ end
+ end
end
context 'when the user is not allowed to upload designs' do
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
new file mode 100644
index 00000000000..e80a24f9760
--- /dev/null
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::CreateService do
+ let(:project) { create(:project) }
+ let(:developer) { create(:user) }
+ let(:reporter) { create(:user) }
+ let(:user) { developer }
+
+ before do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#execute' do
+ subject do
+ described_class.new(project, user, params).execute
+ end
+
+ let(:feature_flag) { subject[:feature_flag] }
+
+ context 'when feature flag cannot be created' do
+ let(:params) { {} }
+
+ it 'returns status error' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'returns validation errors' do
+ expect(subject[:message]).to include("Name can't be blank")
+ end
+
+ it 'does not create audit log' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+
+ context 'when feature flag is saved correctly' do
+ let(:params) do
+ {
+ name: 'feature_flag',
+ description: 'description',
+ scopes_attributes: [{ environment_scope: '*', active: true },
+ { environment_scope: 'production', active: false }]
+ }
+ end
+
+ it 'returns status success' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'creates feature flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(1)
+ end
+
+ it 'creates audit event' do
+ expected_message = 'Created feature flag <strong>feature_flag</strong> '\
+ 'with description <strong>"description"</strong>. '\
+ 'Created rule <strong>*</strong> and set it as <strong>active</strong> '\
+ 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>. '\
+ 'Created rule <strong>production</strong> and set it as <strong>inactive</strong> '\
+ 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>.'
+
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(AuditEvent.last.details[:custom_message]).to eq(expected_message)
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Access Denied')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/destroy_service_spec.rb b/spec/services/feature_flags/destroy_service_spec.rb
new file mode 100644
index 00000000000..df83969e167
--- /dev/null
+++ b/spec/services/feature_flags/destroy_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::DestroyService do
+ include FeatureFlagHelpers
+
+ let(:project) { create(:project) }
+ let(:developer) { create(:user) }
+ let(:reporter) { create(:user) }
+ let(:user) { developer }
+ let!(:feature_flag) { create(:operations_feature_flag, project: project) }
+
+ before do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, user, params).execute(feature_flag) }
+
+ let(:audit_event_message) { AuditEvent.last.details[:custom_message] }
+ let(:params) { {} }
+
+ it 'returns status success' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'destroys feature flag' do
+ expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
+ end
+
+ it 'creates audit log' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to eq("Deleted feature flag <strong>#{feature_flag.name}</strong>.")
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Access Denied')
+ end
+ end
+
+ context 'when feature flag cannot be destroyed' do
+ before do
+ allow(feature_flag).to receive(:destroy).and_return(false)
+ end
+
+ it 'returns status error' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'does not create audit log' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/disable_service_spec.rb b/spec/services/feature_flags/disable_service_spec.rb
new file mode 100644
index 00000000000..00369d63ccf
--- /dev/null
+++ b/spec/services/feature_flags/disable_service_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::DisableService do
+ include FeatureFlagHelpers
+
+ let_it_be(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:service) { described_class.new(project, user, params) }
+ let(:params) { {} }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with params to disable a userWithId strategy on prd scope' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: 'prd',
+ strategy: { name: 'userWithId', parameters: { 'userIds': 'User:1' } }.deep_stringify_keys
+ }
+ end
+
+ context 'when there is a persisted feature flag' do
+ let!(:feature_flag) { create_flag(project, params[:name]) }
+
+ context 'when there is a persisted scope' do
+ let!(:scope) do
+ create_scope(feature_flag, params[:environment_scope], true, strategies)
+ end
+
+ context 'when there is a persisted strategy' do
+ let(:strategies) do
+ [
+ { name: 'userWithId', parameters: { 'userIds': 'User:1' } }.deep_stringify_keys,
+ { name: 'userWithId', parameters: { 'userIds': 'User:2' } }.deep_stringify_keys
+ ]
+ end
+
+ it 'deletes the specified strategy' do
+ subject
+
+ scope.reload
+ expect(scope.strategies.count).to eq(1)
+ expect(scope.strategies).not_to include(params[:strategy])
+ end
+
+ context 'when strategies will be empty' do
+ let(:strategies) { [params[:strategy]] }
+
+ it 'deletes the persisted scope' do
+ subject
+
+ expect(feature_flag.scopes.exists?(environment_scope: params[:environment_scope]))
+ .to eq(false)
+ end
+ end
+ end
+
+ context 'when there is no persisted strategy' do
+ let(:strategies) { [{ name: 'default', parameters: {} }] }
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Strategy not found')
+ end
+ end
+ end
+
+ context 'when there is no persisted scope' do
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Feature Flag Scope not found')
+ end
+ end
+ end
+
+ context 'when there is no persisted feature flag' do
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Feature Flag not found')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/enable_service_spec.rb b/spec/services/feature_flags/enable_service_spec.rb
new file mode 100644
index 00000000000..26dffcae0c7
--- /dev/null
+++ b/spec/services/feature_flags/enable_service_spec.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::EnableService do
+ include FeatureFlagHelpers
+
+ let_it_be(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:service) { described_class.new(project, user, params) }
+ let(:params) { {} }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'with params to enable default strategy on prd scope' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: 'prd',
+ strategy: { name: 'default', parameters: {} }.stringify_keys
+ }
+ end
+
+ context 'when there is no persisted feature flag' do
+ it 'creates a new feature flag with scope' do
+ feature_flag = subject[:feature_flag]
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(subject[:status]).to eq(:success)
+ expect(feature_flag.name).to eq(params[:name])
+ expect(feature_flag.default_scope).not_to be_active
+ expect(scope).to be_active
+ expect(scope.strategies).to include(params[:strategy])
+ end
+
+ context 'when params include default scope' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: '*',
+ strategy: { name: 'userWithId', parameters: { 'userIds': 'abc' } }.deep_stringify_keys
+ }
+ end
+
+ it 'creates a new feature flag with an active default scope using the specified strategy' do
+ feature_flag = subject[:feature_flag]
+ expect(subject[:status]).to eq(:success)
+ expect(feature_flag.default_scope).to be_active
+ expect(feature_flag.default_scope.strategies).to include(params[:strategy])
+ end
+ end
+ end
+
+ context 'when there is a persisted feature flag' do
+ let!(:feature_flag) { create_flag(project, params[:name]) }
+
+ context 'when there is no persisted scope' do
+ it 'creates a new scope for the persisted feature flag' do
+ feature_flag = subject[:feature_flag]
+ scope = feature_flag.scopes.find_by_environment_scope(params[:environment_scope])
+ expect(subject[:status]).to eq(:success)
+ expect(feature_flag.name).to eq(params[:name])
+ expect(scope).to be_active
+ expect(scope.strategies).to include(params[:strategy])
+ end
+ end
+
+ context 'when there is a persisted scope' do
+ let!(:feature_flag_scope) do
+ create_scope(feature_flag, params[:environment_scope], active, strategies)
+ end
+
+ let(:active) { true }
+
+ context 'when the persisted scope does not have the specified strategy yet' do
+ let(:strategies) { [{ name: 'userWithId', parameters: { 'userIds': 'abc' } }] }
+
+ it 'adds the specified strategy to the scope' do
+ subject
+
+ feature_flag_scope.reload
+ expect(feature_flag_scope.strategies).to include(params[:strategy])
+ end
+
+ context 'when the persisted scope is inactive' do
+ let(:active) { false }
+
+ it 'reactivates the scope' do
+ expect { subject }
+ .to change { feature_flag_scope.reload.active }.from(false).to(true)
+ end
+ end
+ end
+
+ context 'when the persisted scope has the specified strategy already' do
+ let(:strategies) { [params[:strategy]] }
+
+ it 'does not add a duplicated strategy to the scope' do
+ expect { subject }
+ .not_to change { feature_flag_scope.reload.strategies.count }
+ end
+ end
+ end
+ end
+ end
+
+ context 'when strategy is not specified in params' do
+ let(:params) do
+ {
+ name: 'awesome',
+ environment_scope: 'prd'
+ }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include('Scopes strategies must be an array of strategy hashes')
+ end
+ end
+
+ context 'when environment scope is not specified in params' do
+ let(:params) do
+ {
+ name: 'awesome',
+ strategy: { name: 'default', parameters: {} }.stringify_keys
+ }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include("Scopes environment scope can't be blank")
+ end
+ end
+
+ context 'when name is not specified in params' do
+ let(:params) do
+ {
+ environment_scope: 'prd',
+ strategy: { name: 'default', parameters: {} }.stringify_keys
+ }
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to include("Name can't be blank")
+ end
+ end
+ end
+end
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
new file mode 100644
index 00000000000..16c3ff23443
--- /dev/null
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -0,0 +1,250 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FeatureFlags::UpdateService do
+ let(:project) { create(:project) }
+ let(:developer) { create(:user) }
+ let(:reporter) { create(:user) }
+ let(:user) { developer }
+ let(:feature_flag) { create(:operations_feature_flag, project: project, active: true) }
+
+ before do
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, user, params).execute(feature_flag) }
+
+ let(:params) { { name: 'new_name' } }
+ let(:audit_event_message) do
+ AuditEvent.last.details[:custom_message]
+ end
+
+ it 'returns success status' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'creates audit event with correct message' do
+ name_was = feature_flag.name
+
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ eq("Updated feature flag <strong>new_name</strong>. "\
+ "Updated name from <strong>\"#{name_was}\"</strong> "\
+ "to <strong>\"new_name\"</strong>.")
+ )
+ end
+
+ context 'with invalid params' do
+ let(:params) { { name: nil } }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:http_status]).to eq(:bad_request)
+ end
+
+ it 'returns error messages' do
+ expect(subject[:message]).to include("Name can't be blank")
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+
+ context 'when user is reporter' do
+ let(:user) { reporter }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Access Denied')
+ end
+ end
+
+ context 'when nothing is changed' do
+ let(:params) { {} }
+
+ it 'returns success status' do
+ expect(subject[:status]).to eq(:success)
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+
+ context 'when the description is changed' do
+ let(:params) { { description: 'new description' } }
+
+ it 'creates audit event with changed description' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include("Updated description from <strong>\"\"</strong>"\
+ " to <strong>\"new description\"</strong>.")
+ )
+ end
+ end
+
+ context 'when flag active state is changed' do
+ let(:params) do
+ {
+ active: false
+ }
+ end
+
+ it 'creates audit event about changing active state' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include('Updated active from <strong>"true"</strong> to <strong>"false"</strong>.')
+ )
+ end
+ end
+
+ context 'when scope active state is changed' do
+ let(:params) do
+ {
+ scopes_attributes: [{ id: feature_flag.scopes.first.id, active: false }]
+ }
+ end
+
+ it 'creates audit event about changing active state' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include("Updated rule <strong>*</strong> active state "\
+ "from <strong>true</strong> to <strong>false</strong>.")
+ )
+ end
+ end
+
+ context 'when scope is renamed' do
+ let(:changed_scope) { feature_flag.scopes.create!(environment_scope: 'review', active: true) }
+ let(:params) do
+ {
+ scopes_attributes: [{ id: changed_scope.id, environment_scope: 'staging' }]
+ }
+ end
+
+ it 'creates audit event with changed name' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to(
+ include("Updated rule <strong>staging</strong> environment scope "\
+ "from <strong>review</strong> to <strong>staging</strong>.")
+ )
+ end
+
+ context 'when the scope cannot be updated' do
+ let(:params) do
+ {
+ scopes_attributes: [{ id: changed_scope.id, environment_scope: '' }]
+ }
+ end
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'returns error messages' do
+ expect(subject[:message]).to include("Scopes environment scope can't be blank")
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+ end
+
+ context 'when scope is deleted' do
+ let(:deleted_scope) { feature_flag.scopes.create!(environment_scope: 'review', active: true) }
+ let(:params) do
+ {
+ scopes_attributes: [{ id: deleted_scope.id, '_destroy': true }]
+ }
+ end
+
+ it 'creates audit event with deleted scope' do
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to include("Deleted rule <strong>review</strong>.")
+ end
+
+ context 'when the scope cannot be deleted' do
+ before do
+ allow(deleted_scope).to receive(:destroy).and_return(false)
+ end
+
+ it 'does not create audit event' do
+ expect do
+ subject
+ end.to not_change { AuditEvent.count }.and raise_error(ActiveRecord::RecordNotDestroyed)
+ end
+ end
+ end
+
+ context 'when new scope is being added' do
+ let(:new_environment_scope) { 'review' }
+ let(:params) do
+ {
+ scopes_attributes: [{ environment_scope: new_environment_scope, active: true }]
+ }
+ end
+
+ it 'creates audit event with new scope' do
+ expected = 'Created rule <strong>review</strong> and set it as <strong>active</strong> '\
+ 'with strategies <strong>[{"name"=>"default", "parameters"=>{}}]</strong>.'
+
+ subject
+
+ expect(audit_event_message).to include(expected)
+ end
+
+ context 'when the scope cannot be created' do
+ let(:new_environment_scope) { '' }
+
+ it 'returns error status' do
+ expect(subject[:status]).to eq(:error)
+ end
+
+ it 'returns error messages' do
+ expect(subject[:message]).to include("Scopes environment scope can't be blank")
+ end
+
+ it 'does not create audit event' do
+ expect { subject }.not_to change { AuditEvent.count }
+ end
+ end
+ end
+
+ context 'when the strategy is changed' do
+ let(:scope) do
+ create(:operations_feature_flag_scope,
+ feature_flag: feature_flag,
+ environment_scope: 'sandbox',
+ strategies: [{ name: "default", parameters: {} }])
+ end
+
+ let(:params) do
+ {
+ scopes_attributes: [{
+ id: scope.id,
+ environment_scope: 'sandbox',
+ strategies: [{
+ name: 'gradualRolloutUserId',
+ parameters: {
+ groupId: 'mygroup',
+ percentage: "40"
+ }
+ }]
+ }]
+ }
+ end
+
+ it 'creates an audit event' do
+ expected = %r{Updated rule <strong>sandbox</strong> strategies from <strong>.*</strong> to <strong>.*</strong>.}
+
+ expect { subject }.to change { AuditEvent.count }.by(1)
+ expect(audit_event_message).to match(expected)
+ end
+ end
+ end
+end
diff --git a/spec/services/git/branch_hooks_service_spec.rb b/spec/services/git/branch_hooks_service_spec.rb
index db25bb766c9..a5290f0be68 100644
--- a/spec/services/git/branch_hooks_service_spec.rb
+++ b/spec/services/git/branch_hooks_service_spec.rb
@@ -429,4 +429,26 @@ RSpec.describe Git::BranchHooksService do
end
end
end
+
+ describe 'Metrics dashboard sync' do
+ context 'with feature flag enabled' do
+ before do
+ Feature.enable(:metrics_dashboards_sync)
+ end
+
+ it 'imports metrics to database' do
+ expect(Metrics::Dashboard::SyncDashboardsWorker).to receive(:perform_async)
+
+ service.execute
+ end
+ end
+
+ context 'with feature flag disabled' do
+ it 'imports metrics to database' do
+ expect(Metrics::Dashboard::SyncDashboardsWorker).to receive(:perform_async)
+
+ service.execute
+ end
+ end
+ end
end
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index 816f20f0bc3..cd38f2e97fb 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -254,24 +254,6 @@ RSpec.describe Git::WikiPushService, services: true do
service.execute
end
end
-
- context 'the wiki_events_on_git_push feature is disabled' do
- before do
- stub_feature_flags(wiki_events_on_git_push: false)
- end
-
- it_behaves_like 'a no-op push'
-
- context 'but is enabled for a given container' do
- before do
- stub_feature_flags(wiki_events_on_git_push: wiki.container)
- end
-
- it 'creates events' do
- expect { process_changes { write_new_page } }.to change(Event, :count).by(1)
- end
- end
- end
end
end
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index fc877f45a39..d5479fa2a06 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -138,4 +138,91 @@ RSpec.describe Groups::CreateService, '#execute' do
expect(group.namespace_settings).to be_persisted
end
end
+
+ describe 'create service for the group' do
+ let(:service) { described_class.new(user, group_params) }
+ let(:created_group) { service.execute }
+
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+
+ it 'creates a service from the instance-level integration' do
+ expect(created_group.services.count).to eq(1)
+ expect(created_group.services.first.api_url).to eq(instance_integration.api_url)
+ expect(created_group.services.first.inherit_from_id).to eq(instance_integration.id)
+ end
+
+ context 'with an active group-level integration' do
+ let(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+ let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ it 'creates a service from the group-level integration' do
+ expect(created_group.services.count).to eq(1)
+ expect(created_group.services.first.api_url).to eq(group_integration.api_url)
+ expect(created_group.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+
+ context 'with an active subgroup-level integration' do
+ let(:service) { described_class.new(user, group_params.merge(parent_id: subgroup.id)) }
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let(:subgroup) do
+ create(:group, parent: group).tap do |subgroup|
+ subgroup.add_owner(user)
+ end
+ end
+
+ it 'creates a service from the subgroup-level integration' do
+ expect(created_group.services.count).to eq(1)
+ expect(created_group.services.first.api_url).to eq(subgroup_integration.api_url)
+ expect(created_group.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+ end
+ end
+ end
+ end
+
+ context 'shared runners configuration' do
+ context 'parent group present' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:shared_runners_config, :descendants_override_disabled_shared_runners_config) do
+ true | false
+ false | false
+ # true | true # invalid at the group level, left as a comment to make the exclusion explicit
+ false | true
+ end
+
+ with_them do
+ let!(:group) { create(:group, shared_runners_enabled: shared_runners_config, allow_descendants_override_disabled_shared_runners: descendants_override_disabled_shared_runners_config) }
+ let!(:service) { described_class.new(user, group_params.merge(parent_id: group.id)) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'creates group following the parent config' do
+ new_group = service.execute
+
+ expect(new_group.shared_runners_enabled).to eq(shared_runners_config)
+ expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(descendants_override_disabled_shared_runners_config)
+ end
+ end
+ end
+
+ context 'root group' do
+ let!(:service) { described_class.new(user) }
+
+ it 'follows default config' do
+ new_group = service.execute
+
+ expect(new_group.shared_runners_enabled).to eq(true)
+ expect(new_group.allow_descendants_override_disabled_shared_runners).to eq(false)
+ end
+ end
+ end
end
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index 4aac602a6da..f284225e23a 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -10,6 +10,15 @@ RSpec.describe Groups::ImportExport::ImportService do
context 'when the job can be successfully scheduled' do
subject(:import_service) { described_class.new(group: group, user: user) }
+ it 'creates group import state' do
+ import_service.async_execute
+
+ import_state = group.import_state
+
+ expect(import_state.user).to eq(user)
+ expect(import_state.group).to eq(group)
+ end
+
it 'enqueues an import job' do
expect(GroupImportWorker).to receive(:perform_async).with(user.id, group.id)
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 89e4d091ff7..6144b86a316 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -285,6 +285,44 @@ RSpec.describe Groups::TransferService do
end
end
+ context 'shared runners configuration' do
+ before do
+ create(:group_member, :owner, group: new_parent_group, user: user)
+ end
+
+ context 'if parent group has disabled shared runners but allows overrides' do
+ let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
+
+ it 'calls update service' do
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_with_override' }).and_call_original
+
+ transfer_service.execute(new_parent_group)
+ end
+ end
+
+ context 'if parent group does not allow shared runners' do
+ let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: false) }
+
+ it 'calls update service' do
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: 'disabled_and_unoverridable' }).and_call_original
+
+ transfer_service.execute(new_parent_group)
+ end
+ end
+
+ context 'if parent group allows shared runners' do
+ let(:group) { create(:group, :public, :nested, shared_runners_enabled: false) }
+ let(:new_parent_group) { create(:group, shared_runners_enabled: true) }
+
+ it 'does not call the update service and keeps shared runners disabled on the group' do
+ expect(Groups::UpdateSharedRunnersService).not_to receive(:new)
+
+ transfer_service.execute(new_parent_group)
+ expect(group.reload.shared_runners_enabled).to be_falsy
+ end
+ end
+ end
+
context 'when a group is transferred to its subgroup' do
let(:new_parent_group) { create(:group, parent: group) }
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 1e6a8d53354..a79cda86a86 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -283,6 +283,31 @@ RSpec.describe Groups::UpdateService do
end
end
+ context 'change shared Runners config' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, shared_runners_enabled: true, group: group) }
+
+ subject { described_class.new(group, user, shared_runners_setting: 'disabled_and_unoverridable').execute }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'calls the shared runners update service' do
+ expect_any_instance_of(::Groups::UpdateSharedRunnersService).to receive(:execute).and_return({ status: :success })
+
+ expect(subject).to be_truthy
+ end
+
+ it 'handles errors in the shared runners update service' do
+ expect_any_instance_of(::Groups::UpdateSharedRunnersService).to receive(:execute).and_return({ status: :error, message: 'something happened' })
+
+ expect(subject).to be_falsy
+
+ expect(group.errors[:update_shared_runners].first).to eq('something happened')
+ end
+ end
+
def update_group(group, user, opts)
Groups::UpdateService.new(group, user, opts).execute
end
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 9fd8477a455..e2838c4ce0b 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -13,17 +13,14 @@ RSpec.describe Groups::UpdateSharedRunnersService do
context 'when current_user is not the group owner' do
let_it_be(:group) { create(:group) }
- let(:params) { { shared_runners_enabled: '0' } }
+ let(:params) { { shared_runners_setting: 'enabled' } }
before do
group.add_maintainer(user)
end
it 'results error and does not call any method' do
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
+ expect(group).not_to receive(:update_shared_runners_setting!)
expect(subject[:status]).to eq(:error)
expect(subject[:message]).to eq('Operation not allowed')
@@ -37,191 +34,60 @@ RSpec.describe Groups::UpdateSharedRunnersService do
end
context 'enable shared Runners' do
- where(:desired_params) do
- ['1', true]
- end
-
- with_them do
- let(:params) { { shared_runners_enabled: desired_params } }
-
- context 'group that its ancestors have shared runners disabled' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
-
- it 'results error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Shared Runners disabled for the parent group')
- end
- end
+ let(:params) { { shared_runners_setting: 'enabled' } }
- context 'root group with shared runners disabled' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ context 'group whose ancestors have shared runners disabled' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- it 'receives correct method and succeeds' do
- expect(group).to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
-
- expect(subject[:status]).to eq(:success)
- end
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')
end
end
- end
-
- context 'disable shared Runners' do
- let_it_be(:group) { create(:group) }
-
- where(:desired_params) do
- ['0', false]
- end
- with_them do
- let(:params) { { shared_runners_enabled: desired_params } }
+ context 'root group with shared runners disabled' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
it 'receives correct method and succeeds' do
- expect(group).to receive(:disable_shared_runners!)
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
+ expect(group).to receive(:update_shared_runners_setting!).with('enabled')
expect(subject[:status]).to eq(:success)
end
end
end
- context 'allow descendants to override' do
- where(:desired_params) do
- ['1', true]
- end
-
- with_them do
- let(:params) { { allow_descendants_override_disabled_shared_runners: desired_params } }
-
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled) }
-
- it 'receives correct method and succeeds' do
- expect(group).to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:disallow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
-
- expect(subject[:status]).to eq(:success)
- end
- end
+ context 'disable shared Runners' do
+ let_it_be(:group) { create(:group) }
+ let(:params) { { shared_runners_setting: 'disabled_and_unoverridable' } }
- context 'when parent does not allow' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+ it 'receives correct method and succeeds' do
+ expect(group).to receive(:update_shared_runners_setting!).with('disabled_and_unoverridable')
- it 'results error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Group level shared Runners not allowed')
- end
- end
+ expect(subject[:status]).to eq(:success)
end
end
- context 'disallow descendants to override' do
- where(:desired_params) do
- ['0', false]
- end
-
- with_them do
- let(:params) { { allow_descendants_override_disabled_shared_runners: desired_params } }
-
- context 'top level group' do
- let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners ) }
-
- it 'receives correct method and succeeds' do
- expect(group).to receive(:disallow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:allow_descendants_override_disabled_shared_runners!)
- expect(group).not_to receive(:enable_shared_runners!)
- expect(group).not_to receive(:disable_shared_runners!)
-
- expect(subject[:status]).to eq(:success)
- end
- end
-
- context 'top level group that has shared Runners enabled' do
- let_it_be(:group) { create(:group, shared_runners_enabled: true) }
-
- it 'results error' do
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Shared Runners enabled')
- end
- end
- end
- end
+ context 'allow descendants to override' do
+ let(:params) { { shared_runners_setting: 'disabled_with_override' } }
- context 'both params are present' do
- context 'shared_runners_enabled: 1 and allow_descendants_override_disabled_shared_runners' do
+ context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
- where(:allow_descendants_override) do
- ['1', true, '0', false]
- end
+ it 'receives correct method and succeeds' do
+ expect(group).to receive(:update_shared_runners_setting!).with('disabled_with_override')
- with_them do
- let(:params) { { shared_runners_enabled: '1', allow_descendants_override_disabled_shared_runners: allow_descendants_override } }
-
- it 'results in an error because shared Runners are enabled' do
- expect { subject }
- .to not_change { group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { project.reload.shared_runners_enabled }
- .and not_change { group.reload.allow_descendants_override_disabled_shared_runners }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- expect(subject[:status]).to eq(:error)
- expect(subject[:message]).to eq('Cannot set shared_runners_enabled to true and allow_descendants_override_disabled_shared_runners')
- end
+ expect(subject[:status]).to eq(:success)
end
end
- context 'shared_runners_enabled: 0 and allow_descendants_override_disabled_shared_runners: 0' do
- let_it_be(:group) { create(:group, :allow_descendants_override_disabled_shared_runners) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
- let_it_be(:sub_group_2) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
- let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
-
- let(:params) { { shared_runners_enabled: '0', allow_descendants_override_disabled_shared_runners: '0' } }
-
- it 'disables shared Runners and disable allow_descendants_override_disabled_shared_runners' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(true).to(false)
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and change { sub_group.reload.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
- .and change { sub_group_2.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { sub_group_2.reload.allow_descendants_override_disabled_shared_runners }
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- .and change { project_2.reload.shared_runners_enabled }.from(true).to(false)
- end
- end
+ context 'when parent does not allow' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
- context 'shared_runners_enabled: 0 and allow_descendants_override_disabled_shared_runners: 1' do
- let_it_be(:group) { create(:group) }
- let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
- let_it_be(:sub_group_2) { create(:group, parent: group) }
- let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
- let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
-
- let(:params) { { shared_runners_enabled: '0', allow_descendants_override_disabled_shared_runners: '1' } }
-
- it 'disables shared Runners and enable allow_descendants_override_disabled_shared_runners only for itself' do
- expect { subject }
- .to change { group.reload.shared_runners_enabled }.from(true).to(false)
- .and change { group.reload.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
- .and not_change { sub_group.reload.shared_runners_enabled }
- .and not_change { sub_group.reload.allow_descendants_override_disabled_shared_runners }
- .and change { sub_group_2.reload.shared_runners_enabled }.from(true).to(false)
- .and not_change { sub_group_2.reload.allow_descendants_override_disabled_shared_runners }
- .and change { project.reload.shared_runners_enabled }.from(true).to(false)
- .and change { project_2.reload.shared_runners_enabled }.from(true).to(false)
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
end
end
end
diff --git a/spec/services/incident_management/incidents/update_severity_service_spec.rb b/spec/services/incident_management/incidents/update_severity_service_spec.rb
new file mode 100644
index 00000000000..bc1abf82cf2
--- /dev/null
+++ b/spec/services/incident_management/incidents/update_severity_service_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::Incidents::UpdateSeverityService do
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ let(:severity) { 'low' }
+ let(:system_note_worker) { ::IncidentManagement::AddSeveritySystemNoteWorker }
+
+ subject(:update_severity) { described_class.new(issuable, user, severity).execute }
+
+ before do
+ allow(system_note_worker).to receive(:perform_async)
+ end
+
+ shared_examples 'adds a system note' do
+ it 'calls AddSeveritySystemNoteWorker' do
+ update_severity
+
+ expect(system_note_worker).to have_received(:perform_async).with(issuable.id, user.id)
+ end
+ end
+
+ context 'when issuable is not an incident' do
+ %i(issue merge_request).each do |issuable_type|
+ let(:issuable) { build_stubbed(issuable_type) }
+
+ it { is_expected.to be_nil }
+
+ it 'does not set severity' do
+ expect { update_severity }.not_to change(IssuableSeverity, :count)
+ end
+
+ it 'does not add a system note' do
+ update_severity
+
+ expect(system_note_worker).not_to have_received(:perform_async)
+ end
+ end
+ end
+
+ context 'when issuable is an incident' do
+ let!(:issuable) { create(:incident) }
+
+ context 'when issuable does not have issuable severity yet' do
+ it 'creates new record' do
+ expect { update_severity }.to change { IssuableSeverity.where(issue: issuable).count }.to(1)
+ end
+
+ it 'sets severity to specified value' do
+ expect { update_severity }.to change { issuable.severity }.to('low')
+ end
+
+ it_behaves_like 'adds a system note'
+ end
+
+ context 'when issuable has an issuable severity' do
+ let!(:issuable_severity) { create(:issuable_severity, issue: issuable, severity: 'medium') }
+
+ it 'does not create new record' do
+ expect { update_severity }.not_to change(IssuableSeverity, :count)
+ end
+
+ it 'updates existing issuable severity' do
+ expect { update_severity }.to change { issuable_severity.severity }.to(severity)
+ end
+
+ it_behaves_like 'adds a system note'
+ end
+
+ context 'when severity value is unsupported' do
+ let(:severity) { 'unsupported-severity' }
+
+ it 'sets the severity to default value' do
+ update_severity
+
+ expect(issuable.issuable_severity.severity).to eq(IssuableSeverity::DEFAULT)
+ end
+
+ it_behaves_like 'adds a system note'
+ end
+ end
+ end
+end
diff --git a/spec/services/issuable/bulk_update_service_spec.rb b/spec/services/issuable/bulk_update_service_spec.rb
index 168a80a97c0..f2bc4f717af 100644
--- a/spec/services/issuable/bulk_update_service_spec.rb
+++ b/spec/services/issuable/bulk_update_service_spec.rb
@@ -254,7 +254,7 @@ RSpec.describe Issuable::BulkUpdateService do
describe 'unsubscribe from issues' do
let(:issues) do
create_list(:closed_issue, 2, project: project) do |issue|
- issue.subscriptions.create(user: user, project: project, subscribed: true)
+ issue.subscriptions.create!(user: user, project: project, subscribed: true)
end
end
diff --git a/spec/services/issuable/clone/attributes_rewriter_spec.rb b/spec/services/issuable/clone/attributes_rewriter_spec.rb
index 372e6d480e3..7f434b8b246 100644
--- a/spec/services/issuable/clone/attributes_rewriter_spec.rb
+++ b/spec/services/issuable/clone/attributes_rewriter_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
group_label = create(:group_label, title: 'group_label', group: group)
create(:label, title: 'label3', project: project2)
- original_issue.update(labels: [project1_label_1, project1_label_2, group_label])
+ original_issue.update!(labels: [project1_label_1, project1_label_2, group_label])
subject.execute
@@ -48,7 +48,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
it 'sets milestone to nil when old issue milestone is not in the new project' do
milestone = create(:milestone, title: 'milestone', project: project1)
- original_issue.update(milestone: milestone)
+ original_issue.update!(milestone: milestone)
subject.execute
@@ -59,7 +59,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
milestone_project1 = create(:milestone, title: 'milestone', project: project1)
milestone_project2 = create(:milestone, title: 'milestone', project: project2)
- original_issue.update(milestone: milestone_project1)
+ original_issue.update!(milestone: milestone_project1)
subject.execute
@@ -69,7 +69,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
it 'copies the milestone when old issue milestone is a group milestone' do
milestone = create(:milestone, title: 'milestone', group: group)
- original_issue.update(milestone: milestone)
+ original_issue.update!(milestone: milestone)
subject.execute
@@ -85,7 +85,7 @@ RSpec.describe Issuable::Clone::AttributesRewriter do
let!(:milestone2_project2) { create(:milestone, title: 'milestone2', project: project2) }
before do
- original_issue.update(milestone: milestone2_project1)
+ original_issue.update!(milestone: milestone2_project1)
create_event(milestone1_project1)
create_event(milestone2_project1)
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 217550542bb..fc01ee8f672 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
before do
issuable.labels << label
- issuable.save
+ issuable.save!
end
it 'creates a resource label event' do
@@ -69,7 +69,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
subject { described_class.new(project, user).execute(issuable, old_labels: [], is_update: false) }
it 'does not create system note for title and description' do
- issuable.save
+ issuable.save!
expect { subject }.not_to change { issuable.notes.count }
end
@@ -78,7 +78,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
label = create(:label, project: project)
issuable.labels << label
- issuable.save
+ issuable.save!
expect { subject }.to change { issuable.resource_label_events.count }.from(0).to(1)
@@ -104,7 +104,7 @@ RSpec.describe Issuable::CommonSystemNotesService do
it 'creates a system note for due_date set' do
issuable.due_date = Date.today
- issuable.save
+ issuable.save!
expect { subject }.to change { issuable.notes.count }.from(0).to(1)
expect(issuable.notes.last.note).to match('changed due date')
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 4db6e5cac12..9076fb11c9b 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -233,26 +233,11 @@ RSpec.describe Issues::CloseService do
expect(email.subject).to include(issue.title)
end
- context 'when resource state events are disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
-
- it 'creates system note about the issue being closed' do
- close_issue
-
- note = issue.notes.last
- expect(note.note).to include "closed"
- end
- end
-
- context 'when resource state events are enabled' do
- it 'creates resource state event about the issue being closed' do
- close_issue
+ it 'creates resource state event about the issue being closed' do
+ close_issue
- event = issue.resource_state_events.last
- expect(event.state).to eq('closed')
- end
+ event = issue.resource_state_events.last
+ expect(event.state).to eq('closed')
end
it 'marks todos as done' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index c2989dc86cf..7997b8de3fd 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Issues::MoveService do
+ include DesignManagementTestHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:author) { create(:user) }
let_it_be(:title) { 'Some issue' }
@@ -201,6 +203,54 @@ RSpec.describe Issues::MoveService do
expect(copied_notes.order('id ASC').pluck(:note)).to eq(notes.map(&:note))
end
end
+
+ context 'issue with a design', :clean_gitlab_redis_shared_state do
+ let!(:design) { create(:design, :with_lfs_file, issue: old_issue) }
+ let!(:note) { create(:diff_note_on_design, noteable: design, issue: old_issue, project: old_issue.project) }
+ let(:subject) { move_service.execute(old_issue, new_project) }
+
+ before do
+ enable_design_management
+ end
+
+ it 'calls CopyDesignCollection::QueueService' do
+ expect(DesignManagement::CopyDesignCollection::QueueService).to receive(:new)
+ .with(user, old_issue, kind_of(Issue))
+ .and_call_original
+
+ subject
+ end
+
+ it 'logs if QueueService returns an error', :aggregate_failures do
+ error_message = 'error'
+
+ expect_next_instance_of(DesignManagement::CopyDesignCollection::QueueService) do |service|
+ expect(service).to receive(:execute).and_return(
+ ServiceResponse.error(message: error_message)
+ )
+ end
+ expect(Gitlab::AppLogger).to receive(:error).with(error_message)
+
+ subject
+ end
+
+ it 'does not call QueueService when the feature flag is disabled' do
+ stub_feature_flags(design_management_copy_designs: false)
+
+ expect(DesignManagement::CopyDesignCollection::QueueService).not_to receive(:new)
+
+ subject
+ end
+
+ # Perform a small integration test to ensure the services and worker
+ # can correctly create designs.
+ it 'copies the design and its notes', :sidekiq_inline, :aggregate_failures do
+ new_issue = subject
+
+ expect(new_issue.designs.size).to eq(1)
+ expect(new_issue.designs.first.notes.size).to eq(1)
+ end
+ end
end
describe 'move permissions' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index b3e8fba4e9a..cfda27795c7 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -650,7 +650,7 @@ RSpec.describe Issues::UpdateService, :mailer do
context 'when the labels change' do
before do
- Timecop.freeze(1.minute.from_now) do
+ travel_to(1.minute.from_now) do
update_issue(label_ids: [label.id])
end
end
diff --git a/spec/services/keys/last_used_service_spec.rb b/spec/services/keys/last_used_service_spec.rb
index 82b6b05975b..a2cd5ffdd38 100644
--- a/spec/services/keys/last_used_service_spec.rb
+++ b/spec/services/keys/last_used_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Keys::LastUsedService do
key = create(:key, last_used_at: 1.year.ago)
time = Time.zone.now
- Timecop.freeze(time) { described_class.new(key).execute }
+ travel_to(time) { described_class.new(key).execute }
expect(key.reload.last_used_at).to be_like_time(time)
end
diff --git a/spec/services/lfs/push_service_spec.rb b/spec/services/lfs/push_service_spec.rb
index 8e5b98fdc9c..f67284ff48d 100644
--- a/spec/services/lfs/push_service_spec.rb
+++ b/spec/services/lfs/push_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Lfs::PushService do
stub_lfs_batch(lfs_object)
expect(lfs_client)
- .to receive(:upload)
+ .to receive(:upload!)
.with(lfs_object, upload_action_spec(lfs_object), authenticated: true)
expect(service.execute).to eq(status: :success)
@@ -28,7 +28,7 @@ RSpec.describe Lfs::PushService do
it 'does nothing if there are no LFS objects' do
lfs_object.destroy!
- expect(lfs_client).not_to receive(:upload)
+ expect(lfs_client).not_to receive(:upload!)
expect(service.execute).to eq(status: :success)
end
@@ -36,20 +36,39 @@ RSpec.describe Lfs::PushService do
it 'does not upload the object when upload is not requested' do
stub_lfs_batch(lfs_object, upload: false)
- expect(lfs_client).not_to receive(:upload)
+ expect(lfs_client).not_to receive(:upload!)
expect(service.execute).to eq(status: :success)
end
+ it 'verifies the upload if requested' do
+ stub_lfs_batch(lfs_object, verify: true)
+
+ expect(lfs_client).to receive(:upload!)
+ expect(lfs_client)
+ .to receive(:verify!)
+ .with(lfs_object, verify_action_spec(lfs_object), authenticated: true)
+
+ expect(service.execute).to eq(status: :success)
+ end
+
+ it 'skips verification if requested but upload fails' do
+ stub_lfs_batch(lfs_object, verify: true)
+
+ expect(lfs_client).to receive(:upload!) { raise 'failed' }
+ expect(lfs_client).not_to receive(:verify!)
+ expect(service.execute).to eq(status: :error, message: 'failed')
+ end
+
it 'returns a failure when submitting a batch fails' do
- expect(lfs_client).to receive(:batch) { raise 'failed' }
+ expect(lfs_client).to receive(:batch!) { raise 'failed' }
expect(service.execute).to eq(status: :error, message: 'failed')
end
it 'returns a failure when submitting an upload fails' do
stub_lfs_batch(lfs_object)
- expect(lfs_client).to receive(:upload) { raise 'failed' }
+ expect(lfs_client).to receive(:upload!) { raise 'failed' }
expect(service.execute).to eq(status: :error, message: 'failed')
end
@@ -71,23 +90,28 @@ RSpec.describe Lfs::PushService do
create(:lfs_objects_project, project: project, repository_type: type).lfs_object
end
- def stub_lfs_batch(*objects, upload: true)
+ def stub_lfs_batch(*objects, upload: true, verify: false)
expect(lfs_client)
- .to receive(:batch).with('upload', containing_exactly(*objects))
- .and_return('transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload) })
+ .to receive(:batch!).with('upload', containing_exactly(*objects))
+ .and_return('transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload, verify: verify) })
end
- def batch_spec(*objects, upload: true)
+ def batch_spec(*objects, upload: true, verify: false)
{ 'transfer' => 'basic', 'objects' => objects.map {|o| object_spec(o, upload: upload) } }
end
- def object_spec(object, upload: true)
- { 'oid' => object.oid, 'size' => object.size, 'authenticated' => true }.tap do |spec|
- spec['actions'] = { 'upload' => upload_action_spec(object) } if upload
+ def object_spec(object, upload: true, verify: false)
+ { 'oid' => object.oid, 'size' => object.size, 'authenticated' => true, 'actions' => {} }.tap do |spec|
+ spec['actions']['upload'] = upload_action_spec(object) if upload
+ spec['actions']['verify'] = verify_action_spec(object) if verify
end
end
def upload_action_spec(object)
{ 'href' => "https://example.com/#{object.oid}/#{object.size}", 'header' => { 'Key' => 'value' } }
end
+
+ def verify_action_spec(object)
+ { 'href' => "https://example.com/#{object.oid}/#{object.size}/verify", 'header' => { 'Key' => 'value' } }
+ end
end
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 3b3f2f3b95a..4f731ad5852 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -29,15 +29,15 @@ RSpec.describe Members::DestroyService do
end
it 'destroys the member' do
- expect { described_class.new(current_user).execute(member, opts) }.to change { member.source.members_and_requesters.count }.by(-1)
+ expect { described_class.new(current_user).execute(member, **opts) }.to change { member.source.members_and_requesters.count }.by(-1)
end
it 'destroys member notification_settings' do
if member_user.notification_settings.any?
- expect { described_class.new(current_user).execute(member, opts) }
+ expect { described_class.new(current_user).execute(member, **opts) }
.to change { member_user.notification_settings.count }.by(-1)
else
- expect { described_class.new(current_user).execute(member, opts) }
+ expect { described_class.new(current_user).execute(member, **opts) }
.not_to change { member_user.notification_settings.count }
end
end
@@ -63,7 +63,7 @@ RSpec.describe Members::DestroyService do
expect(service).to receive(:enqueue_unassign_issuables).with(member)
end
- service.execute(member, opts)
+ service.execute(member, **opts)
expect(member_user.assigned_open_merge_requests_count).to be(0)
expect(member_user.assigned_open_issues_count).to be(0)
@@ -83,14 +83,14 @@ RSpec.describe Members::DestroyService do
it 'calls Member#after_decline_request' do
expect_any_instance_of(NotificationService).to receive(:decline_access_request).with(member)
- described_class.new(current_user).execute(member, opts)
+ described_class.new(current_user).execute(member, **opts)
end
context 'when current user is the member' do
it 'does not call Member#after_decline_request' do
expect_any_instance_of(NotificationService).not_to receive(:decline_access_request).with(member)
- described_class.new(member_user).execute(member, opts)
+ described_class.new(member_user).execute(member, **opts)
end
end
end
@@ -280,7 +280,6 @@ RSpec.describe Members::DestroyService do
context 'subresources' do
let(:user) { create(:user) }
let(:member_user) { create(:user) }
- let(:opts) { {} }
let(:group) { create(:group, :public) }
let(:subgroup) { create(:group, parent: group) }
@@ -303,7 +302,7 @@ RSpec.describe Members::DestroyService do
group_member = create(:group_member, :developer, group: group, user: member_user)
- described_class.new(user).execute(group_member, opts)
+ described_class.new(user).execute(group_member)
end
it 'removes the project membership' do
@@ -350,7 +349,6 @@ RSpec.describe Members::DestroyService do
context 'deletion of invitations created by deleted project member' do
let(:user) { project.owner }
let(:member_user) { create(:user) }
- let(:opts) { {} }
let(:project) { create(:project) }
@@ -359,7 +357,7 @@ RSpec.describe Members::DestroyService do
project_member = create(:project_member, :maintainer, user: member_user, project: project)
- described_class.new(user).execute(project_member, opts)
+ described_class.new(user).execute(project_member)
end
it 'removes project members invited by deleted user' do
diff --git a/spec/services/members/invitation_reminder_email_service_spec.rb b/spec/services/members/invitation_reminder_email_service_spec.rb
new file mode 100644
index 00000000000..88280869476
--- /dev/null
+++ b/spec/services/members/invitation_reminder_email_service_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::InvitationReminderEmailService do
+ describe 'sending invitation reminders' do
+ subject { described_class.new(invitation).execute }
+
+ let_it_be(:frozen_time) { Date.today.beginning_of_day }
+ let_it_be(:invitation) { build(:group_member, :invited, created_at: frozen_time) }
+
+ context 'when the experiment is disabled' do
+ before do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).and_return(false)
+ invitation.expires_at = frozen_time + 2.days
+ end
+
+ it 'does not send an invitation' do
+ travel_to(frozen_time + 1.day) do
+ expect(invitation).not_to receive(:send_invitation_reminder)
+
+ subject
+ end
+ end
+ end
+
+ context 'when the experiment is enabled' do
+ before do
+ allow(Gitlab::Experimentation).to receive(:enabled_for_attribute?).and_return(true)
+ invitation.expires_at = frozen_time + expires_at_days.days if expires_at_days
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:expires_at_days, :send_reminder_at_days) do
+ 0 | []
+ 1 | []
+ 2 | [1]
+ 3 | [1, 2]
+ 4 | [1, 2, 3]
+ 5 | [1, 2, 4]
+ 6 | [1, 3, 5]
+ 7 | [1, 3, 5]
+ 8 | [2, 3, 6]
+ 9 | [2, 4, 7]
+ 10 | [2, 4, 8]
+ 11 | [2, 4, 8]
+ 12 | [2, 5, 9]
+ 13 | [2, 5, 10]
+ 14 | [2, 5, 10]
+ 15 | [2, 5, 10]
+ nil | [2, 5, 10]
+ end
+
+ with_them do
+ # Create an invitation today with an expiration date 0 to 10 days in the future, or with no expiration date.
+ # We chose 10 days because we only fetch invitations created at most 10 days ago.
+ (0..10).each do |day|
+ it 'sends an invitation reminder only on the expected days' do
+ next if day > (expires_at_days || 10) # We don't need to test after the invitation has already expired
+
+ # We are traveling in a loop from today to 10 days from now
+ travel_to(frozen_time + day.days) do
+ # Depending on the expiration date and on how many days have passed since the invitation was created, a reminder may or may not be sent on this day of the loop
+ if (reminder_index = send_reminder_at_days.index(day))
+ expect(invitation).to receive(:send_invitation_reminder).with(reminder_index)
+ else
+ expect(invitation).not_to receive(:send_invitation_reminder)
+ end
+
+ subject
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index e7ac286f48b..67fb4eaade5 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -19,54 +19,45 @@ RSpec.describe MergeRequests::CloseService do
describe '#execute' do
it_behaves_like 'cache counters invalidator'
- [true, false].each do |state_tracking_enabled|
- context "valid params with state_tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- let(:service) { described_class.new(project, user, {}) }
+ context 'valid params' do
+ let(:service) { described_class.new(project, user, {}) }
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
- allow(service).to receive(:execute_hooks)
+ before do
+ allow(service).to receive(:execute_hooks)
- perform_enqueued_jobs do
- @merge_request = service.execute(merge_request)
- end
+ perform_enqueued_jobs do
+ @merge_request = service.execute(merge_request)
end
+ end
- it { expect(@merge_request).to be_valid }
- it { expect(@merge_request).to be_closed }
+ it { expect(@merge_request).to be_valid }
+ it { expect(@merge_request).to be_closed }
- it 'executes hooks with close action' do
- expect(service).to have_received(:execute_hooks)
- .with(@merge_request, 'close')
- end
+ it 'executes hooks with close action' do
+ expect(service).to have_received(:execute_hooks)
+ .with(@merge_request, 'close')
+ end
- it 'sends email to user2 about assign of new merge_request', :sidekiq_might_not_need_inline do
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it 'sends email to user2 about the assignment of the new merge_request', :sidekiq_might_not_need_inline do
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
- it 'creates system note about merge_request reassign' do
- if state_tracking_enabled
- event = @merge_request.resource_state_events.last
- expect(event.state).to eq('closed')
- else
- note = @merge_request.notes.last
- expect(note.note).to include 'closed'
- end
- end
+ it 'creates a resource event' do
+ event = @merge_request.resource_state_events.last
+ expect(event.state).to eq('closed')
+ end
- it 'marks todos as done' do
- expect(todo.reload).to be_done
- end
+ it 'marks todos as done' do
+ expect(todo.reload).to be_done
+ end
- context 'when auto merge is enabled' do
- let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
+ context 'when auto merge is enabled' do
+ let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
- it 'cancels the auto merge' do
- expect(@merge_request).not_to be_auto_merge_enabled
- end
+ it 'cancels the auto merge' do
+ expect(@merge_request).not_to be_auto_merge_enabled
end
end
end
diff --git a/spec/services/merge_requests/export_csv_service_spec.rb b/spec/services/merge_requests/export_csv_service_spec.rb
new file mode 100644
index 00000000000..8161a444231
--- /dev/null
+++ b/spec/services/merge_requests/export_csv_service_spec.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ExportCsvService do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let(:csv) { CSV.parse(subject.csv_data, headers: true).first }
+
+ subject { described_class.new(MergeRequest.where(id: merge_request.id)) }
+
+ describe 'csv_data' do
+ it 'contains the correct information', :aggregate_failures do
+ expect(csv['MR IID']).to eq(merge_request.iid.to_s)
+ expect(csv['Title']).to eq(merge_request.title)
+ expect(csv['State']).to eq(merge_request.state)
+ expect(csv['Description']).to eq(merge_request.description)
+ expect(csv['Source Branch']).to eq(merge_request.source_branch)
+ expect(csv['Target Branch']).to eq(merge_request.target_branch)
+ expect(csv['Source Project ID']).to eq(merge_request.source_project_id.to_s)
+ expect(csv['Target Project ID']).to eq(merge_request.target_project_id.to_s)
+ expect(csv['Author']).to eq(merge_request.author.name)
+ expect(csv['Author Username']).to eq(merge_request.author.username)
+ end
+
+ describe 'assignees' do
+ context 'when assigned' do
+ let_it_be(:merge_request) { create(:merge_request, assignees: create_list(:user, 2)) }
+
+ it 'contains the names of assignees' do
+ expect(csv['Assignees']).to eq(merge_request.assignees.map(&:name).join(', '))
+ end
+
+ it 'contains the usernames of assignees' do
+ expect(csv['Assignee Usernames']).to eq(merge_request.assignees.map(&:username).join(', '))
+ end
+ end
+
+ context 'when not assigned' do
+ it 'returns empty strings' do
+ expect(csv['Assignees']).to eq('')
+ expect(csv['Assignee Usernames']).to eq('')
+ end
+ end
+ end
+
+ describe 'approvers' do
+ context 'when approved' do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let(:approvers) { create_list(:user, 2) }
+
+ before do
+ merge_request.approved_by_users = approvers
+ end
+
+ it 'contains the names of approvers separated by a comma' do
+ expect(csv['Approvers'].split(', ')).to contain_exactly(approvers[0].name, approvers[1].name)
+ end
+
+ it 'contains the usernames of approvers separated by a comma' do
+ expect(csv['Approver Usernames'].split(', ')).to contain_exactly(approvers[0].username, approvers[1].username)
+ end
+ end
+
+ context 'when not approved' do
+ it 'returns empty strings' do
+ expect(csv['Approvers']).to eq('')
+ expect(csv['Approver Usernames']).to eq('')
+ end
+ end
+ end
+
+ describe 'merged user' do
+ context 'MR is merged' do
+ let_it_be(:merge_request) { create(:merge_request, :merged, :with_merged_metrics) }
+
+ it 'is merged' do
+ expect(csv['State']).to eq('merged')
+ end
+
+ it 'has a merged user' do
+ expect(csv['Merged User']).to eq(merge_request.metrics.merged_by.name)
+ expect(csv['Merged Username']).to eq(merge_request.metrics.merged_by.username)
+ end
+ end
+
+ context 'MR is not merged' do
+ it 'returns empty strings' do
+ expect(csv['Merged User']).to eq('')
+ expect(csv['Merged Username']).to eq('')
+ end
+ end
+ end
+
+ describe 'milestone' do
+ context 'milestone is assigned' do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:milestone) { create(:milestone, :active, project: merge_request.project) }
+
+ before do
+ merge_request.update!(milestone_id: milestone.id)
+ end
+
+ it 'contains the milestone ID' do
+ expect(csv['Milestone ID']).to eq(merge_request.milestone.id.to_s)
+ end
+ end
+
+ context 'no milestone is assigned' do
+ it 'returns an empty string' do
+ expect(csv['Milestone ID']).to eq('')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index 55856deeaca..64c473d947f 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -22,74 +22,72 @@ RSpec.describe MergeRequests::FfMergeService do
end
describe '#execute' do
- [true, false].each do |state_tracking_enabled|
- context "valid params with state_tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- let(:service) { described_class.new(project, user, valid_merge_params) }
-
- def execute_ff_merge
- perform_enqueued_jobs do
- service.execute(merge_request)
- end
+ context 'valid params' do
+ let(:service) { described_class.new(project, user, valid_merge_params) }
+
+ def execute_ff_merge
+ perform_enqueued_jobs do
+ service.execute(merge_request)
end
+ end
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
+ before do
+ allow(service).to receive(:execute_hooks)
+ end
- allow(service).to receive(:execute_hooks)
- end
+ it "does not create merge commit" do
+ execute_ff_merge
- it "does not create merge commit" do
- execute_ff_merge
+ source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
+ target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
- source_branch_sha = merge_request.source_project.repository.commit(merge_request.source_branch).sha
- target_branch_sha = merge_request.target_project.repository.commit(merge_request.target_branch).sha
+ expect(source_branch_sha).to eq(target_branch_sha)
+ end
- expect(source_branch_sha).to eq(target_branch_sha)
- end
+ it 'keeps the merge request valid' do
+ expect { execute_ff_merge }
+ .not_to change { merge_request.valid? }
+ end
- it 'keeps the merge request valid' do
- expect { execute_ff_merge }
- .not_to change { merge_request.valid? }
- end
+ it 'updates the merge request to merged' do
+ expect { execute_ff_merge }
+ .to change { merge_request.merged? }
+ .from(false)
+ .to(true)
+ end
- it 'updates the merge request to merged' do
- expect { execute_ff_merge }
- .to change { merge_request.merged? }
- .from(false)
- .to(true)
- end
+ it 'sends email to user2 about merge of new merge_request' do
+ execute_ff_merge
- it 'sends email to user2 about merge of new merge_request' do
- execute_ff_merge
+ email = ActionMailer::Base.deliveries.last
+ expect(email.to.first).to eq(user2.email)
+ expect(email.subject).to include(merge_request.title)
+ end
- email = ActionMailer::Base.deliveries.last
- expect(email.to.first).to eq(user2.email)
- expect(email.subject).to include(merge_request.title)
- end
+ it 'creates resource event about merge_request merge' do
+ execute_ff_merge
- it 'creates system note about merge_request merge' do
- execute_ff_merge
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
+ end
- if state_tracking_enabled
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- else
- note = merge_request.notes.last
- expect(note.note).to include 'merged'
- end
- end
+ it 'does not update squash_commit_sha if it is not a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- it 'does not update squash_commit_sha if it is not a squash' do
- expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
- end
+ expect { execute_ff_merge }.not_to change { merge_request.squash_commit_sha }
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
+ end
- it 'updates squash_commit_sha if it is a squash' do
- merge_request.update!(squash: true)
+ it 'updates squash_commit_sha if it is a squash' do
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
- expect { execute_ff_merge }
- .to change { merge_request.squash_commit_sha }
- .from(nil)
- end
+ merge_request.update!(squash: true)
+
+ expect { execute_ff_merge }
+ .to change { merge_request.squash_commit_sha }
+ .from(nil)
+
+ expect(merge_request.in_progress_merge_commit_sha).to be_nil
end
end
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 8328f461029..d0e3102f157 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -20,12 +20,9 @@ RSpec.describe MergeRequests::MergeService do
end
context 'valid params' do
- let(:state_tracking) { true }
-
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking)
-
allow(service).to receive(:execute_hooks)
+ expect(merge_request).to receive(:update_and_mark_in_progress_merge_commit_sha).twice.and_call_original
perform_enqueued_jobs do
service.execute(merge_request)
@@ -47,20 +44,9 @@ RSpec.describe MergeRequests::MergeService do
end
context 'note creation' do
- context 'when resource state event tracking is disabled' do
- let(:state_tracking) { false }
-
- it 'creates system note about merge_request merge' do
- note = merge_request.notes.last
- expect(note.note).to include 'merged'
- end
- end
-
- context 'when resource state event tracking is enabled' do
- it 'creates resource state event about merge_request merge' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('merged')
- end
+ it 'creates resource state event about merge_request merge' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('merged')
end
end
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index cace1e0bf09..ca0c4b29ebe 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -367,76 +367,58 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- [true, false].each do |state_tracking_enabled|
- context "push to origin repo target branch with state tracking #{state_tracking_enabled ? 'enabled' : 'disabled'}", :sidekiq_might_not_need_inline do
+ context 'push to origin repo target branch', :sidekiq_might_not_need_inline do
+ context 'when all MRs to the target branch had diffs' do
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ reload_mrs
end
- context 'when all MRs to the target branch had diffs' do
- before do
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
- reload_mrs
- end
+ it 'updates the merge state' do
+ expect(@merge_request).to be_merged
+ expect(@fork_merge_request).to be_merged
+ expect(@build_failed_todo).to be_done
+ expect(@fork_build_failed_todo).to be_done
- it 'updates the merge state' do
- expect(@merge_request).to be_merged
- expect(@fork_merge_request).to be_merged
- expect(@build_failed_todo).to be_done
- expect(@fork_build_failed_todo).to be_done
-
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- expect(@fork_merge_request.notes.last.note).to include('merged')
- end
- end
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
+ expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
end
+ end
- context 'when an MR to be closed was empty already' do
- let!(:empty_fork_merge_request) do
- create(:merge_request,
- source_project: @fork_project,
- source_branch: 'master',
- target_branch: 'master',
- target_project: @project)
- end
+ context 'when an MR to be closed was empty already' do
+ let!(:empty_fork_merge_request) do
+ create(:merge_request,
+ source_project: @fork_project,
+ source_branch: 'master',
+ target_branch: 'master',
+ target_project: @project)
+ end
- before do
- # This spec already has a fake push, so pretend that we were targeting
- # feature all along.
- empty_fork_merge_request.update_columns(target_branch: 'feature')
+ before do
+ # This spec already has a fake push, so pretend that we were targeting
+ # feature all along.
+ empty_fork_merge_request.update_columns(target_branch: 'feature')
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
- reload_mrs
- empty_fork_merge_request.reload
- end
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ reload_mrs
+ empty_fork_merge_request.reload
+ end
- it 'only updates the non-empty MRs' do
- expect(@merge_request).to be_merged
- expect(@fork_merge_request).to be_merged
-
- expect(empty_fork_merge_request).to be_open
- expect(empty_fork_merge_request.merge_request_diff.state).to eq('empty')
- expect(empty_fork_merge_request.notes).to be_empty
-
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- expect(@fork_merge_request.notes.last.note).to include('merged')
- end
- end
+ it 'only updates the non-empty MRs' do
+ expect(@merge_request).to be_merged
+ expect(@fork_merge_request).to be_merged
+
+ expect(empty_fork_merge_request).to be_open
+ expect(empty_fork_merge_request.merge_request_diff.state).to eq('empty')
+ expect(empty_fork_merge_request.notes).to be_empty
+
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
+ expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
end
end
- context "manual merge of source branch #{state_tracking_enabled ? 'enabled' : 'disabled'}", :sidekiq_might_not_need_inline do
+ context 'manual merge of source branch', :sidekiq_might_not_need_inline do
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
-
# Merge master -> feature branch
@project.repository.merge(@user, @merge_request.diff_head_sha, @merge_request, 'Test message')
commit = @project.repository.commit('feature')
@@ -445,13 +427,8 @@ RSpec.describe MergeRequests::RefreshService do
end
it 'updates the merge state' do
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- expect(@fork_merge_request.notes.last.note).to include('merged')
- end
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
+ expect(@fork_merge_request.resource_state_events.last.state).to eq('merged')
expect(@merge_request).to be_merged
expect(@merge_request.diffs.size).to be > 0
@@ -616,29 +593,21 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- [true, false].each do |state_tracking_enabled|
- context "push to origin repo target branch after fork project was removed #{state_tracking_enabled ? 'enabled' : 'disabled'}" do
- before do
- stub_feature_flags(track_resource_state_change_events: state_tracking_enabled)
+ context 'push to origin repo target branch after fork project was removed' do
+ before do
+ @fork_project.destroy!
+ service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
+ reload_mrs
+ end
- @fork_project.destroy!
- service.new(@project, @user).execute(@oldrev, @newrev, 'refs/heads/feature')
- reload_mrs
- end
+ it 'updates the merge request state' do
+ expect(@merge_request.resource_state_events.last.state).to eq('merged')
- it 'updates the merge request state' do
- if state_tracking_enabled
- expect(@merge_request.resource_state_events.last.state).to eq('merged')
- else
- expect(@merge_request.notes.last.note).to include('merged')
- end
-
- expect(@merge_request).to be_merged
- expect(@fork_merge_request).to be_open
- expect(@fork_merge_request.notes).to be_empty
- expect(@build_failed_todo).to be_done
- expect(@fork_build_failed_todo).to be_done
- end
+ expect(@merge_request).to be_merged
+ expect(@fork_merge_request).to be_open
+ expect(@fork_merge_request.notes).to be_empty
+ expect(@build_failed_todo).to be_done
+ expect(@fork_build_failed_todo).to be_done
end
end
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index 0066834180e..ffc2ebb344c 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -20,11 +20,8 @@ RSpec.describe MergeRequests::ReopenService do
context 'valid params' do
let(:service) { described_class.new(project, user, {}) }
- let(:state_tracking) { true }
before do
- stub_feature_flags(track_resource_state_change_events: state_tracking)
-
allow(service).to receive(:execute_hooks)
perform_enqueued_jobs do
@@ -47,20 +44,9 @@ RSpec.describe MergeRequests::ReopenService do
end
context 'note creation' do
- context 'when state event tracking is disabled' do
- let(:state_tracking) { false }
-
- it 'creates system note about merge_request reopen' do
- note = merge_request.notes.last
- expect(note.note).to include 'reopened'
- end
- end
-
- context 'when state event tracking is enabled' do
- it 'creates resource state event about merge_request reopen' do
- event = merge_request.resource_state_events.last
- expect(event.state).to eq('reopened')
- end
+ it 'creates resource state event about merge_request reopen' do
+ event = merge_request.resource_state_events.last
+ expect(event.state).to eq('reopened')
end
end
end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 3c3e10495d3..ed8872b71f7 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
title: 'New title',
description: 'Also please fix',
assignee_ids: [user.id],
- reviewer_ids: [user.id],
+ reviewer_ids: [],
state_event: 'close',
label_ids: [label.id],
target_branch: 'target',
@@ -78,7 +78,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(@merge_request).to be_valid
expect(@merge_request.title).to eq('New title')
expect(@merge_request.assignees).to match_array([user])
- expect(@merge_request.reviewers).to match_array([user])
+ expect(@merge_request.reviewers).to match_array([])
expect(@merge_request).to be_closed
expect(@merge_request.labels.count).to eq(1)
expect(@merge_request.labels.first.title).to eq(label.name)
@@ -116,6 +116,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
labels: [],
mentioned_users: [user2],
assignees: [user3],
+ reviewers: [],
milestone: nil,
total_time_spent: 0,
description: "FYI #{user2.to_reference}"
@@ -138,6 +139,35 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(note.note).to include "assigned to #{user.to_reference} and unassigned #{user3.to_reference}"
end
+ context 'with reviewers' do
+ let(:opts) { { reviewer_ids: [user2.id] } }
+
+ context 'when merge_request_reviewers feature is disabled' do
+ before do
+ stub_feature_flags(merge_request_reviewers: false)
+ end
+
+ it 'does not create a system note about merge_request review request' do
+ note = find_note('review requested from')
+
+ expect(note).to be_nil
+ end
+ end
+
+ context 'when merge_request_reviewers feature is enabled' do
+ before do
+ stub_feature_flags(merge_request_reviewers: true)
+ end
+
+ it 'creates system note about merge_request review request' do
+ note = find_note('requested review from')
+
+ expect(note).not_to be_nil
+ expect(note.note).to include "requested review from #{user2.to_reference}"
+ end
+ end
+ end
+
it 'creates a resource label event' do
event = merge_request.resource_label_events.last
@@ -467,15 +497,15 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'when reviewers gets changed' do
- before do
+ it 'marks pending todo as done' do
update_merge_request({ reviewer_ids: [user2.id] })
- end
- it 'marks pending todo as done' do
expect(pending_todo.reload).to be_done
end
it 'creates a pending todo for new review request' do
+ update_merge_request({ reviewer_ids: [user2.id] })
+
attributes = {
project: project,
author: user,
@@ -488,6 +518,17 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect(Todo.where(attributes).count).to eq 1
end
+
+ it 'sends email reviewer change notifications to old and new reviewers', :sidekiq_might_not_need_inline do
+ merge_request.reviewers = [user2]
+
+ perform_enqueued_jobs do
+ update_merge_request({ reviewer_ids: [user3.id] })
+ end
+
+ should_email(user2)
+ should_email(user3)
+ end
end
context 'when the milestone is removed' do
@@ -542,7 +583,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
context 'when the labels change' do
before do
- Timecop.freeze(1.minute.from_now) do
+ travel_to(1.minute.from_now) do
update_merge_request({ label_ids: [label.id] })
end
end
diff --git a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
index aea9c25d104..5dc30c156ac 100644
--- a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
@@ -67,6 +67,23 @@ RSpec.describe Metrics::Dashboard::CustomDashboardService, :use_clean_rails_memo
.at_least(:once)
end
+ context 'with metric in database' do
+ let!(:prometheus_metric) do
+ create(:prometheus_metric, project: project, identifier: 'metric_a1', group: 'custom')
+ end
+
+ it 'includes metric_id' do
+ dashboard = described_class.new(*service_params).get_dashboard
+
+ metric_id = dashboard[:dashboard][:panel_groups].find { |panel_group| panel_group[:group] == 'Group A' }
+ .fetch(:panels).find { |panel| panel[:title] == 'Super Chart A1' }
+ .fetch(:metrics).find { |metric| metric[:id] == 'metric_a1' }
+ .fetch(:metric_id)
+
+ expect(metric_id).to eq(prometheus_metric.id)
+ end
+ end
+
context 'and the dashboard is then deleted' do
it 'does not return the previously cached dashboard' do
described_class.new(*service_params).get_dashboard
diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb
index 66c5c504c64..dd68471d927 100644
--- a/spec/services/milestones/destroy_service_spec.rb
+++ b/spec/services/milestones/destroy_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Milestones::DestroyService do
let(:group_milestone) { create(:milestone, group: group) }
before do
- project.update(namespace: group)
+ project.update!(namespace: group)
group.add_developer(user)
end
diff --git a/spec/services/milestones/promote_service_spec.rb b/spec/services/milestones/promote_service_spec.rb
index f0a34241c74..8f4201d8d94 100644
--- a/spec/services/milestones/promote_service_spec.rb
+++ b/spec/services/milestones/promote_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Milestones::PromoteService do
end
it 'raises error if project does not belong to a group' do
- project.update(namespace: user.namespace)
+ project.update!(namespace: user.namespace)
expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
end
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index 4a626fe688a..6f4f55b2bd0 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Milestones::TransferService do
new_group.add_maintainer(user)
project.add_maintainer(user)
# simulate project transfer
- project.update(group: new_group)
+ project.update!(group: new_group)
end
context 'without existing milestone at the new group level' do
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
new file mode 100644
index 00000000000..2319bdcd4de
--- /dev/null
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespaceSettings::UpdateService do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:settings) { {} }
+
+ subject(:service) { described_class.new(user, group, settings) }
+
+ describe "#execute" do
+ context "group has no namespace_settings" do
+ it "builds out a new namespace_settings record" do
+ expect do
+ service.execute
+ end.to change { NamespaceSetting.count }.by(1)
+ end
+ end
+
+ context "group has a namespace_settings" do
+ before do
+ create(:namespace_settings, namespace: group)
+
+ service.execute
+ end
+
+ it "doesn't create a new namespace_setting record" do
+ expect do
+ service.execute
+ end.not_to change { NamespaceSetting.count }
+ end
+ end
+ end
+end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 7c0d4b756bd..4da9f4115a1 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -437,7 +437,7 @@ RSpec.describe Notes::CreateService do
expect do
existing_note
- Timecop.freeze(Time.current + 1.minute) { subject }
+ travel_to(Time.current + 1.minute) { subject }
existing_note.reload
end.to change { existing_note.type }.from(nil).to('DiscussionNote')
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 47b8ba0cd72..66efdf8abe7 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Notes::UpdateService do
end
it 'does not update the note when params is blank' do
- Timecop.freeze(1.day.from_now) do
+ travel_to(1.day.from_now) do
expect { update_note({}) }.not_to change { note.reload.updated_at }
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 03e24524f9f..473a06c4c8c 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -150,6 +150,16 @@ RSpec.describe NotificationService, :mailer do
end
end
+ shared_examples 'participating by reviewer notification' do
+ it 'emails the participant' do
+ issuable.reviewers << participant
+
+ notification_trigger
+
+ should_email(participant)
+ end
+ end
+
shared_examples_for 'participating notifications' do
it_behaves_like 'participating by note notification'
it_behaves_like 'participating by author notification'
@@ -1778,6 +1788,60 @@ RSpec.describe NotificationService, :mailer do
end
end
+ describe '#changed_reviewer_of_merge_request' do
+ let(:merge_request) { create(:merge_request, author: author, source_project: project, reviewers: [reviewer], description: 'cc @participant') }
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:reviewer) { create(:user) }
+
+ before do
+ update_custom_notification(:change_reviewer_merge_request, @u_guest_custom, resource: project)
+ update_custom_notification(:change_reviewer_merge_request, @u_custom_global)
+ end
+
+ it 'sends emails to relevant users only', :aggregate_failures do
+ notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer])
+
+ merge_request.reviewers.each { |reviewer| should_email(reviewer) }
+ should_email(merge_request.author)
+ should_email(@u_watcher)
+ should_email(@u_participant_mentioned)
+ should_email(@subscriber)
+ should_email(@watcher_and_subscriber)
+ should_email(@u_guest_watcher)
+ should_email(@u_guest_custom)
+ should_email(@u_custom_global)
+ should_not_email(@unsubscriber)
+ should_not_email(@u_participating)
+ should_not_email(@u_disabled)
+ should_not_email(@u_lazy_participant)
+ end
+
+ it 'adds "review requested" reason for new reviewer' do
+ notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer])
+
+ merge_request.reviewers.each do |reviewer|
+ email = find_email_for(reviewer)
+
+ expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::REVIEW_REQUESTED)
+ end
+ end
+
+ context 'participating notifications with reviewers' do
+ let(:participant) { create(:user, username: 'user-participant') }
+ let(:issuable) { merge_request }
+ let(:notification_trigger) { notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer]) }
+
+ it_behaves_like 'participating notifications'
+ it_behaves_like 'participating by reviewer notification'
+ end
+
+ it_behaves_like 'project emails are disabled' do
+ let(:notification_target) { merge_request }
+ let(:notification_trigger) { notification.changed_reviewer_of_merge_request(merge_request, current_user, [reviewer]) }
+ end
+ end
+
describe '#push_to_merge_request' do
before do
update_custom_notification(:push_to_merge_request, @u_guest_custom, resource: project)
@@ -3018,32 +3082,25 @@ RSpec.describe NotificationService, :mailer do
describe '#prometheus_alerts_fired' do
let!(:project) { create(:project) }
- let!(:prometheus_alert) { create(:prometheus_alert, project: project) }
let!(:master) { create(:user) }
let!(:developer) { create(:user) }
+ let(:alert_attributes) { build(:alert_management_alert, project: project).attributes }
before do
project.add_maintainer(master)
end
it 'sends the email to owners and masters' do
- expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, master.id, prometheus_alert).and_call_original
- expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, project.owner.id, prometheus_alert).and_call_original
- expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project.id, developer.id, prometheus_alert)
+ expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, master.id, alert_attributes).and_call_original
+ expect(Notify).to receive(:prometheus_alert_fired_email).with(project.id, project.owner.id, alert_attributes).and_call_original
+ expect(Notify).not_to receive(:prometheus_alert_fired_email).with(project.id, developer.id, alert_attributes)
- subject.prometheus_alerts_fired(prometheus_alert.project, [prometheus_alert])
+ subject.prometheus_alerts_fired(project, [alert_attributes])
end
it_behaves_like 'project emails are disabled' do
- before do
- allow_next_instance_of(::Gitlab::Alerting::Alert) do |instance|
- allow(instance).to receive(:valid?).and_return(true)
- end
- end
-
- let(:alert_params) { { 'labels' => { 'gitlab_alert_id' => 'unknown' } } }
- let(:notification_target) { prometheus_alert.project }
- let(:notification_trigger) { subject.prometheus_alerts_fired(prometheus_alert.project, [alert_params]) }
+ let(:notification_target) { project }
+ let(:notification_trigger) { subject.prometheus_alerts_fired(project, [alert_attributes]) }
around do |example|
perform_enqueued_jobs { example.run }
diff --git a/spec/services/packages/create_event_service_spec.rb b/spec/services/packages/create_event_service_spec.rb
new file mode 100644
index 00000000000..7e66b430a8c
--- /dev/null
+++ b/spec/services/packages/create_event_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::CreateEventService do
+ let(:scope) { 'container' }
+ let(:event_name) { 'push_package' }
+
+ let(:params) do
+ {
+ scope: scope,
+ event_name: event_name
+ }
+ end
+
+ subject { described_class.new(nil, user, params).execute }
+
+ describe '#execute' do
+ shared_examples 'package event creation' do |originator_type, expected_scope|
+ it 'creates the event' do
+ expect { subject }.to change { Packages::Event.count }.by(1)
+
+ expect(subject.originator_type).to eq(originator_type)
+ expect(subject.originator).to eq(user&.id)
+ expect(subject.event_scope).to eq(expected_scope)
+ expect(subject.event_type).to eq(event_name)
+ end
+ end
+
+ context 'with a user' do
+ let(:user) { create(:user) }
+
+ it_behaves_like 'package event creation', 'user', 'container'
+ end
+
+ context 'with a deploy token' do
+ let(:user) { create(:deploy_token) }
+
+ it_behaves_like 'package event creation', 'deploy_token', 'container'
+ end
+
+ context 'with no user' do
+ let(:user) { nil }
+
+ it_behaves_like 'package event creation', 'guest', 'container'
+ end
+
+ context 'with a package as scope' do
+ let(:user) { nil }
+ let(:scope) { create(:npm_package) }
+
+ it_behaves_like 'package event creation', 'guest', 'npm'
+ end
+ end
+end
diff --git a/spec/services/packages/generic/create_package_file_service_spec.rb b/spec/services/packages/generic/create_package_file_service_spec.rb
new file mode 100644
index 00000000000..0ae109ef996
--- /dev/null
+++ b/spec/services/packages/generic/create_package_file_service_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Generic::CreatePackageFileService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ describe '#execute' do
+ let(:sha256) { '440e5e148a25331bbd7991575f7d54933c0ebf6cc735a18ee5066ac1381bb590' }
+ let(:temp_file) { Tempfile.new("test") }
+ let(:file) { UploadedFile.new(temp_file.path, sha256: sha256) }
+ let(:package) { create(:generic_package, project: project) }
+ let(:params) do
+ {
+ package_name: 'mypackage',
+ package_version: '0.0.1',
+ file: file,
+ file_name: 'myfile.tar.gz.1'
+ }
+ end
+
+ before do
+ FileUtils.touch(temp_file)
+ end
+
+ after do
+ FileUtils.rm_f(temp_file)
+ end
+
+ it 'creates package file' do
+ package_service = double
+ package_params = {
+ name: params[:package_name],
+ version: params[:package_version],
+ build: params[:build]
+ }
+ expect(::Packages::Generic::FindOrCreatePackageService).to receive(:new).with(project, user, package_params).and_return(package_service)
+ expect(package_service).to receive(:execute).and_return(package)
+
+ service = described_class.new(project, user, params)
+
+ expect { service.execute }.to change { package.package_files.count }.by(1)
+
+ package_file = package.package_files.last
+ aggregate_failures do
+ expect(package_file.package).to eq(package)
+ expect(package_file.file_name).to eq('myfile.tar.gz.1')
+ expect(package_file.size).to eq(file.size)
+ expect(package_file.file_sha256).to eq(sha256)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/generic/find_or_create_package_service_spec.rb b/spec/services/packages/generic/find_or_create_package_service_spec.rb
new file mode 100644
index 00000000000..5a9b8b03279
--- /dev/null
+++ b/spec/services/packages/generic/find_or_create_package_service_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Generic::FindOrCreatePackageService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:ci_build) { create(:ci_build, :running, user: user) }
+
+ let(:params) do
+ {
+ name: 'mypackage',
+ version: '0.0.1'
+ }
+ end
+
+ describe '#execute' do
+ context 'when the package does not exist yet' do
+ it 'creates package' do
+ service = described_class.new(project, user, params)
+
+ expect { service.execute }.to change { project.packages.generic.count }.by(1)
+
+ package = project.packages.generic.last
+
+ aggregate_failures do
+ expect(package.creator).to eq(user)
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info).to be_nil
+ end
+ end
+
+ it 'creates package and package build info when build is provided' do
+ service = described_class.new(project, user, params.merge(build: ci_build))
+
+ expect { service.execute }.to change { project.packages.generic.count }.by(1)
+
+ package = project.packages.generic.last
+
+ aggregate_failures do
+ expect(package.creator).to eq(user)
+ expect(package.name).to eq('mypackage')
+ expect(package.version).to eq('0.0.1')
+ expect(package.build_info.pipeline).to eq(ci_build.pipeline)
+ end
+ end
+ end
+
+ context 'when the package already exists' do
+ let!(:package) { project.packages.generic.create!(params) }
+
+ context 'when package was created manually' do
+ it 'finds the package and does not create package build info even if build is provided' do
+ service = described_class.new(project, user, params.merge(build: ci_build))
+
+ expect do
+ found_package = service.execute
+
+ expect(found_package).to eq(package)
+ end.not_to change { project.packages.generic.count }
+
+ expect(package.reload.build_info).to be_nil
+ end
+ end
+
+ context 'when package was created by pipeline' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ package.create_build_info!(pipeline: pipeline)
+ end
+
+ it 'finds the package and does not change package build info even if build is provided' do
+ service = described_class.new(project, user, params.merge(build: ci_build))
+
+ expect do
+ found_package = service.execute
+
+ expect(found_package).to eq(package)
+ end.not_to change { project.packages.generic.count }
+
+ expect(package.reload.build_info.pipeline).to eq(pipeline)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 77a0e330109..d8e94a0885b 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -89,6 +89,7 @@ RSpec.describe Projects::Alerting::NotifyService do
it 'creates a system note corresponding to alert creation' do
expect { subject }.to change(Note, :count).by(1)
+ expect(Note.last.note).to include(payload_raw.fetch(:monitoring_tool))
end
context 'existing alert with same fingerprint' do
@@ -127,23 +128,8 @@ RSpec.describe Projects::Alerting::NotifyService do
let(:alert) { create(:alert_management_alert, :with_issue, project: project, fingerprint: fingerprint_sha) }
let(:issue) { alert.issue }
- context 'state_tracking is enabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: true)
- end
-
- it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
- it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
- end
-
- context 'state_tracking is disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
-
- it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
- it { expect { subject }.to change(Note, :count).by(1) }
- end
+ it { expect { subject }.to change { issue.reload.state }.from('opened').to('closed') }
+ it { expect { subject }.to change(ResourceStateEvent, :count).by(1) }
end
end
end
@@ -208,15 +194,19 @@ RSpec.describe Projects::Alerting::NotifyService do
environment_id: nil
)
end
+
+ it 'creates a system note corresponding to alert creation' do
+ expect { subject }.to change(Note, :count).by(1)
+ expect(Note.last.note).to include('Generic Alert Endpoint')
+ end
end
end
context 'with overlong payload' do
- let(:payload_raw) do
- {
- title: 'a' * Gitlab::Utils::DeepSize::DEFAULT_MAX_SIZE,
- start_time: starts_at.rfc3339
- }
+ let(:deep_size_object) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
+
+ before do
+ allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
end
it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
@@ -230,17 +220,6 @@ RSpec.describe Projects::Alerting::NotifyService do
it_behaves_like 'processes incident issues'
- context 'with an invalid payload' do
- before do
- allow(Gitlab::Alerting::NotificationPayloadParser)
- .to receive(:call)
- .and_raise(Gitlab::Alerting::NotificationPayloadParser::BadPayloadError)
- end
-
- it_behaves_like 'does not process incident issues due to error', http_status: :bad_request
- it_behaves_like 'does not an create alert management alert'
- end
-
context 'when alert already exists' do
let(:fingerprint_sha) { Digest::SHA1.hexdigest(fingerprint) }
let!(:alert) { create(:alert_management_alert, project: project, fingerprint: fingerprint_sha) }
diff --git a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
index 2c708e75a25..2f2474f2681 100644
--- a/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/cleanup_tags_service_spec.rb
@@ -245,7 +245,7 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
end
it 'succeeds without a user' do
- expect_delete(%w(Bb Ba C))
+ expect_delete(%w(Bb Ba C), container_expiration_policy: true)
is_expected.to include(status: :success, deleted: %w(Bb Ba C))
end
@@ -287,10 +287,10 @@ RSpec.describe Projects::ContainerRepository::CleanupTagsService do
end
end
- def expect_delete(tags)
+ def expect_delete(tags, container_expiration_policy: nil)
expect(Projects::ContainerRepository::DeleteTagsService)
.to receive(:new)
- .with(repository.project, user, tags: tags)
+ .with(repository.project, user, tags: tags, container_expiration_policy: container_expiration_policy)
.and_call_original
expect_any_instance_of(Projects::ContainerRepository::DeleteTagsService)
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index 5116427dad2..c3ae26b1f05 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -85,6 +85,41 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
end
end
+ RSpec.shared_examples 'supporting fast delete' do
+ context 'when the registry supports fast delete' do
+ before do
+ allow(repository.client).to receive(:supports_tag_delete?).and_return(true)
+ end
+
+ it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::Gitlab::DeleteTagsService
+
+ it_behaves_like 'handling invalid params'
+
+ context 'with the real service' do
+ before do
+ stub_delete_reference_requests(tags)
+ expect_delete_tag_by_names(tags)
+ end
+
+ it { is_expected.to include(status: :success) }
+
+ it_behaves_like 'logging a success response'
+ end
+
+ context 'with a timeout error' do
+ before do
+ expect_next_instance_of(::Projects::ContainerRepository::Gitlab::DeleteTagsService) do |delete_service|
+ expect(delete_service).to receive(:delete_tags).and_raise(::Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError)
+ end
+ end
+
+ it { is_expected.to include(status: :error, message: 'timeout while deleting tags') }
+
+ it_behaves_like 'logging an error response', message: 'timeout while deleting tags'
+ end
+ end
+ end
+
describe '#execute' do
let(:tags) { %w[A Ba] }
@@ -103,62 +138,7 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
project.add_developer(user)
end
- context 'when the registry supports fast delete' do
- context 'and the feature is enabled' do
- before do
- allow(repository.client).to receive(:supports_tag_delete?).and_return(true)
- end
-
- it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::Gitlab::DeleteTagsService
-
- it_behaves_like 'handling invalid params'
-
- context 'with the real service' do
- before do
- stub_delete_reference_requests(tags)
- expect_delete_tag_by_names(tags)
- end
-
- it { is_expected.to include(status: :success) }
-
- it_behaves_like 'logging a success response'
- end
-
- context 'with a timeout error' do
- before do
- expect_next_instance_of(::Projects::ContainerRepository::Gitlab::DeleteTagsService) do |delete_service|
- expect(delete_service).to receive(:delete_tags).and_raise(::Projects::ContainerRepository::Gitlab::DeleteTagsService::TimeoutError)
- end
- end
-
- it { is_expected.to include(status: :error, message: 'timeout while deleting tags') }
-
- it_behaves_like 'logging an error response', message: 'timeout while deleting tags'
- end
- end
-
- context 'and the feature is disabled' do
- before do
- stub_feature_flags(container_registry_fast_tag_delete: false)
- end
-
- it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::ThirdParty::DeleteTagsService
-
- it_behaves_like 'handling invalid params'
-
- context 'with the real service' do
- before do
- stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
- tags.each { |tag| stub_put_manifest_request(tag) }
- expect_delete_tag_by_digest('sha256:dummy')
- end
-
- it { is_expected.to include(status: :success) }
-
- it_behaves_like 'logging a success response'
- end
- end
- end
+ it_behaves_like 'supporting fast delete'
context 'when the registry does not support fast delete' do
before do
@@ -170,5 +150,19 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
it_behaves_like 'handling invalid params'
end
end
+
+ context 'without user' do
+ let_it_be(:user) { nil }
+
+ context 'when not run by a cleanup policy' do
+ it { is_expected.to include(status: :error) }
+ end
+
+ context 'when run by a cleanup policy' do
+ let(:params) { { tags: tags, container_expiration_policy: true } }
+
+ it_behaves_like 'supporting fast delete'
+ end
+ end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index e1df8700795..b81b3e095cf 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -487,18 +487,7 @@ RSpec.describe Projects::CreateService, '#execute' do
describe 'create service for the project' do
subject(:project) { create_project(user, opts) }
- context 'when there is an active instance-level and an active template integration' do
- let!(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
- let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
-
- it 'creates a service from the instance-level integration' do
- expect(project.services.count).to eq(1)
- expect(project.services.first.api_url).to eq(instance_integration.api_url)
- expect(project.services.first.inherit_from_id).to eq(instance_integration.id)
- end
- end
-
- context 'when there is an active service template' do
+ context 'with an active service template' do
let!(:template_integration) { create(:prometheus_service, :template, api_url: 'https://prometheus.template.com/') }
it 'creates a service from the template' do
@@ -506,6 +495,60 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.services.first.api_url).to eq(template_integration.api_url)
expect(project.services.first.inherit_from_id).to be_nil
end
+
+ context 'with an active instance-level integration' do
+ let!(:instance_integration) { create(:prometheus_service, :instance, api_url: 'https://prometheus.instance.com/') }
+
+ it 'creates a service from the instance-level integration' do
+ expect(project.services.count).to eq(1)
+ expect(project.services.first.api_url).to eq(instance_integration.api_url)
+ expect(project.services.first.inherit_from_id).to eq(instance_integration.id)
+ end
+
+ context 'with an active group-level integration' do
+ let!(:group_integration) { create(:prometheus_service, group: group, project: nil, api_url: 'https://prometheus.group.com/') }
+ let!(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ let(:opts) do
+ {
+ name: 'GitLab',
+ namespace_id: group.id
+ }
+ end
+
+ it 'creates a service from the group-level integration' do
+ expect(project.services.count).to eq(1)
+ expect(project.services.first.api_url).to eq(group_integration.api_url)
+ expect(project.services.first.inherit_from_id).to eq(group_integration.id)
+ end
+
+ context 'with an active subgroup' do
+ let!(:subgroup_integration) { create(:prometheus_service, group: subgroup, project: nil, api_url: 'https://prometheus.subgroup.com/') }
+ let!(:subgroup) do
+ create(:group, parent: group).tap do |subgroup|
+ subgroup.add_owner(user)
+ end
+ end
+
+ let(:opts) do
+ {
+ name: 'GitLab',
+ namespace_id: subgroup.id
+ }
+ end
+
+ it 'creates a service from the subgroup-level integration' do
+ expect(project.services.count).to eq(1)
+ expect(project.services.first.api_url).to eq(subgroup_integration.api_url)
+ expect(project.services.first.inherit_from_id).to eq(subgroup_integration.id)
+ end
+ end
+ end
+ end
end
context 'when there is an invalid integration' do
@@ -739,4 +782,100 @@ RSpec.describe Projects::CreateService, '#execute' do
def create_project(user, opts)
Projects::CreateService.new(user, opts).execute
end
+
+ context 'shared Runners config' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:user) { create :user }
+
+ context 'when parent group is present' do
+ let_it_be(:group) do
+ create(:group) do |group|
+ group.add_owner(user)
+ end
+ end
+
+ before do
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ end
+
+ user.refresh_authorized_projects # Ensure cache is warm
+ end
+
+ context 'default value based on parent group setting' do
+ where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
+ 'enabled' | nil | true
+ 'disabled_with_override' | nil | false
+ 'disabled_and_unoverridable' | nil | false
+ end
+
+ with_them do
+ it 'creates project following the parent config' do
+ params = opts.merge(namespace_id: group.id)
+ params = params.merge(shared_runners_enabled: desired_config_for_new_project) unless desired_config_for_new_project.nil?
+ project = create_project(user, params)
+
+ expect(project).to be_valid
+ expect(project.shared_runners_enabled).to eq(expected_result_for_project)
+ end
+ end
+ end
+
+ context 'parent group is present and allows desired config' do
+ where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
+ 'enabled' | true | true
+ 'enabled' | false | false
+ 'disabled_with_override' | false | false
+ 'disabled_with_override' | true | true
+ 'disabled_and_unoverridable' | false | false
+ end
+
+ with_them do
+ it 'creates project following the parent config' do
+ params = opts.merge(namespace_id: group.id, shared_runners_enabled: desired_config_for_new_project)
+ project = create_project(user, params)
+
+ expect(project).to be_valid
+ expect(project.shared_runners_enabled).to eq(expected_result_for_project)
+ end
+ end
+ end
+
+ context 'parent group is present and disallows desired config' do
+ where(:shared_runners_setting, :desired_config_for_new_project) do
+ 'disabled_and_unoverridable' | true
+ end
+
+ with_them do
+ it 'does not create project' do
+ params = opts.merge(namespace_id: group.id, shared_runners_enabled: desired_config_for_new_project)
+ project = create_project(user, params)
+
+ expect(project.persisted?).to eq(false)
+ expect(project).to be_invalid
+ expect(project.errors[:shared_runners_enabled]).to include('cannot be enabled because parent group does not allow it')
+ end
+ end
+ end
+ end
+
+ context 'parent group is not present' do
+ where(:desired_config, :expected_result) do
+ true | true
+ false | false
+ nil | true
+ end
+
+ with_them do
+ it 'follows desired config' do
+ opts[:shared_runners_enabled] = desired_config unless desired_config.nil?
+ project = create_project(user, opts)
+
+ expect(project).to be_valid
+ expect(project.shared_runners_enabled).to eq(expected_result)
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index a0e83fb4a21..3ae96d7a5ab 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -314,6 +314,37 @@ RSpec.describe Projects::TransferService do
end
end
+ context 'shared Runners group level configurations' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
+ true | 'disabled_and_unoverridable' | false
+ false | 'disabled_and_unoverridable' | false
+ true | 'disabled_with_override' | true
+ false | 'disabled_with_override' | false
+ true | 'enabled' | true
+ false | 'enabled' | false
+ end
+
+ with_them do
+ let(:project) { create(:project, :public, :repository, namespace: user.namespace, shared_runners_enabled: project_shared_runners_enabled) }
+ let(:group) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ expect_next_found_instance_of(Group) do |group|
+ expect(group).to receive(:shared_runners_setting).and_return(shared_runners_setting)
+ end
+
+ execute_transfer
+ end
+
+ it 'updates shared runners based on the parent group' do
+ expect(project.shared_runners_enabled).to eq(expected_shared_runners_enabled)
+ end
+ end
+ end
+
context 'missing group labels applied to issues or merge requests' do
it 'delegates transfer to Labels::TransferService' do
group.add_owner(user)
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index bfb3cbb0131..b6fd72cd4f5 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -16,8 +16,6 @@ RSpec.describe Projects::UpdatePagesService do
subject { described_class.new(project, build) }
before do
- stub_feature_flags(safezip_use_rubyzip: true)
-
project.remove_pages
end
@@ -104,10 +102,6 @@ RSpec.describe Projects::UpdatePagesService do
let(:file) { fixture_file_upload("spec/fixtures/pages_non_writeable.zip") }
context 'when using RubyZip' do
- before do
- stub_feature_flags(safezip_use_rubyzip: true)
- end
-
it 'succeeds to extract' do
expect(execute).to eq(:success)
expect(project.pages_metadatum).to be_deployed
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index 1de04888e0a..f0e76bb81b3 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -68,25 +68,12 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
end
context "when given URLs containing escaped elements" do
- using RSpec::Parameterized::TableSyntax
+ it_behaves_like "URLs containing escaped elements return expected status" do
+ let(:result) { execute! }
- where(:url, :result_status) do
- "https://user:0a%23@test.example.com/project.git" | :success
- "https://git.example.com:1%2F%2F@source.developers.google.com/project.git" | :success
- CGI.escape("git://localhost:1234/some-path?some-query=some-val\#@example.com/") | :error
- CGI.escape(CGI.escape("https://user:0a%23@test.example.com/project.git")) | :error
- end
-
- with_them do
before do
allow(remote_mirror).to receive(:url).and_return(url)
end
-
- it "returns expected status" do
- result = execute!
-
- expect(result[:status]).to eq(result_status)
- end
end
end
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 7832d727220..3375d9762c8 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -151,6 +151,32 @@ RSpec.describe Projects::UpdateService do
expect(project.reload).to be_internal
end
end
+
+ context 'when updating shared runners' do
+ context 'can enable shared runners' do
+ let(:group) { create(:group, shared_runners_enabled: true) }
+ let(:project) { create(:project, namespace: group, shared_runners_enabled: false) }
+
+ it 'enables shared runners' do
+ result = update_project(project, user, shared_runners_enabled: true)
+
+ expect(result).to eq({ status: :success })
+ expect(project.reload.shared_runners_enabled).to be_truthy
+ end
+ end
+
+ context 'cannot enable shared runners' do
+ let(:group) { create(:group, :shared_runners_disabled) }
+ let(:project) { create(:project, namespace: group, shared_runners_enabled: false) }
+
+ it 'does not enable shared runners' do
+ result = update_project(project, user, shared_runners_enabled: true)
+
+ expect(result).to eq({ status: :error, message: 'Shared runners enabled cannot be enabled because parent group does not allow it' })
+ expect(project.reload.shared_runners_enabled).to be_falsey
+ end
+ end
+ end
end
describe 'when updating project that has forks' do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index b970a48051f..8f0c60d5d65 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -3,23 +3,27 @@
require 'spec_helper'
RSpec.describe QuickActions::InterpretService do
- let(:project) { create(:project, :public) }
- let(:developer) { create(:user) }
- let(:developer2) { create(:user) }
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:repository_project) { create(:project, :repository) }
+ let_it_be(:project) { public_project }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:developer2) { create(:user) }
+ let_it_be_with_reload(:issue) { create(:issue, project: project) }
let(:milestone) { create(:milestone, project: project, title: '9.10') }
let(:commit) { create(:commit, project: project) }
- let(:inprogress) { create(:label, project: project, title: 'In Progress') }
- let(:helmchart) { create(:label, project: project, title: 'Helm Chart Registry') }
- let(:bug) { create(:label, project: project, title: 'Bug') }
- let(:note) { build(:note, commit_id: merge_request.diff_head_sha) }
+ let_it_be(:inprogress) { create(:label, project: project, title: 'In Progress') }
+ let_it_be(:helmchart) { create(:label, project: project, title: 'Helm Chart Registry') }
+ let_it_be(:bug) { create(:label, project: project, title: 'Bug') }
let(:service) { described_class.new(project, developer) }
+ before_all do
+ public_project.add_developer(developer)
+ repository_project.add_developer(developer)
+ end
+
before do
stub_licensed_features(multiple_issue_assignees: false,
multiple_merge_request_assignees: false)
-
- project.add_developer(developer)
end
describe '#execute' do
@@ -146,7 +150,6 @@ RSpec.describe QuickActions::InterpretService do
shared_examples 'multiword label name starting without ~' do
it 'fetches label ids and populates add_label_ids if content contains /label' do
- helmchart # populate the label
_, updates = service.execute(content, issuable)
expect(updates).to eq(add_label_ids: [helmchart.id])
@@ -155,7 +158,6 @@ RSpec.describe QuickActions::InterpretService do
shared_examples 'label name is included in the middle of another label name' do
it 'ignores the sublabel when the content contains the includer label name' do
- helmchart # populate the label
create(:label, project: project, title: 'Chart')
_, updates = service.execute(content, issuable)
@@ -493,7 +495,7 @@ RSpec.describe QuickActions::InterpretService do
end
shared_examples 'merge immediately command' do
- let(:project) { create(:project, :repository) }
+ let(:project) { repository_project }
it 'runs merge command if content contains /merge' do
_, updates, _ = service.execute(content, issuable)
@@ -509,7 +511,7 @@ RSpec.describe QuickActions::InterpretService do
end
shared_examples 'merge automatically command' do
- let(:project) { create(:project, :repository) }
+ let(:project) { repository_project }
it 'runs merge command if content contains /merge and returns merge message' do
_, updates, message = service.execute(content, issuable)
@@ -600,7 +602,7 @@ RSpec.describe QuickActions::InterpretService do
context 'when issuable is already confidential' do
before do
- issuable.update(confidential: true)
+ issuable.update!(confidential: true)
end
it 'does not return the success message' do
@@ -722,7 +724,7 @@ RSpec.describe QuickActions::InterpretService do
end
context 'when sha is missing' do
- let(:project) { create(:project, :repository) }
+ let(:project) { repository_project }
let(:service) { described_class.new(project, developer, {}) }
it 'precheck passes and returns merge command' do
@@ -844,7 +846,7 @@ RSpec.describe QuickActions::InterpretService do
end
it 'returns the unassign message for all the assignee if content contains /unassign' do
- issue.update(assignee_ids: [developer.id, developer2.id])
+ issue.update!(assignee_ids: [developer.id, developer2.id])
_, _, message = service.execute(content, issue)
expect(message).to eq("Removed assignees #{developer.to_reference} and #{developer2.to_reference}.")
@@ -860,7 +862,7 @@ RSpec.describe QuickActions::InterpretService do
end
it 'returns the unassign message for all the assignee if content contains /unassign' do
- merge_request.update(assignee_ids: [developer.id, developer2.id])
+ merge_request.update!(assignee_ids: [developer.id, developer2.id])
_, _, message = service.execute(content, merge_request)
expect(message).to eq("Removed assignees #{developer.to_reference} and #{developer2.to_reference}.")
@@ -879,10 +881,14 @@ RSpec.describe QuickActions::InterpretService do
end
context 'only group milestones available' do
- let(:ancestor_group) { create(:group) }
- let(:group) { create(:group, parent: ancestor_group) }
- let(:project) { create(:project, :public, namespace: group) }
- let(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') }
+ let_it_be(:ancestor_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: ancestor_group) }
+ let_it_be(:project) { create(:project, :public, namespace: group) }
+ let_it_be(:milestone) { create(:milestone, group: ancestor_group, title: '10.0') }
+
+ before_all do
+ project.add_developer(developer)
+ end
it_behaves_like 'milestone command' do
let(:content) { "/milestone %#{milestone.title}" }
@@ -1457,14 +1463,14 @@ RSpec.describe QuickActions::InterpretService do
end
context '/board_move command' do
- let(:todo) { create(:label, project: project, title: 'To Do') }
- let(:inreview) { create(:label, project: project, title: 'In Review') }
+ let_it_be(:todo) { create(:label, project: project, title: 'To Do') }
+ let_it_be(:inreview) { create(:label, project: project, title: 'In Review') }
let(:content) { %{/board_move ~"#{inreview.title}"} }
- let!(:board) { create(:board, project: project) }
- let!(:todo_list) { create(:list, board: board, label: todo) }
- let!(:inreview_list) { create(:list, board: board, label: inreview) }
- let!(:inprogress_list) { create(:list, board: board, label: inprogress) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:todo_list) { create(:list, board: board, label: todo) }
+ let_it_be(:inreview_list) { create(:list, board: board, label: inreview) }
+ let_it_be(:inprogress_list) { create(:list, board: board, label: inprogress) }
it 'populates remove_label_ids for all current board columns' do
issue.update!(label_ids: [todo.id, inprogress.id])
@@ -1599,6 +1605,10 @@ RSpec.describe QuickActions::InterpretService do
context "when logged user cannot create_merge_requests in the project" do
let(:project) { create(:project, :archived) }
+ before do
+ project.add_developer(developer)
+ end
+
it_behaves_like 'empty command'
end
@@ -1844,8 +1854,7 @@ RSpec.describe QuickActions::InterpretService do
end
describe 'relabel command' do
- let(:content) { '/relabel Bug' }
- let!(:bug) { create(:label, project: project, title: 'Bug') }
+ let(:content) { "/relabel #{bug.title}" }
let(:feature) { create(:label, project: project, title: 'Feature') }
it 'includes label name' do
@@ -1938,8 +1947,7 @@ RSpec.describe QuickActions::InterpretService do
end
describe 'board move command' do
- let(:content) { '/board_move ~bug' }
- let!(:bug) { create(:label, project: project, title: 'bug') }
+ let(:content) { "/board_move ~#{bug.title}" }
let!(:board) { create(:board, project: project) }
it 'includes the label name' do
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 7dbd55a6909..4a77e5fed9b 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -24,16 +24,6 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
end
- shared_examples 'fails when flag is disabled' do
- before do
- stub_feature_flags(resource_access_token: false)
- end
-
- it 'returns nil' do
- expect(subject).to be nil
- end
- end
-
shared_examples 'fails on gitlab.com' do
before do
allow(Gitlab).to receive(:com?) { true }
@@ -53,6 +43,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
access_token = response.payload[:access_token]
expect(access_token.user.reload.user_type).to eq("#{resource_type}_bot")
+ expect(access_token.user.created_by_id).to eq(user.id)
end
context 'email confirmation status' do
@@ -180,7 +171,6 @@ RSpec.describe ResourceAccessTokens::CreateService do
let_it_be(:resource) { project }
it_behaves_like 'fails when user does not have the permission to create a Resource Bot'
- it_behaves_like 'fails when flag is disabled'
it_behaves_like 'fails on gitlab.com'
context 'user with valid permission' do
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index f6bb7acee57..fc613a6224a 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -444,7 +444,7 @@ RSpec.describe SearchService do
context 'with :with_api_entity_associations' do
let(:unredacted_results) { ar_relation(MergeRequest.with_api_entity_associations, readable, unreadable) }
- it_behaves_like "redaction limits N+1 queries", limit: 7
+ it_behaves_like "redaction limits N+1 queries", limit: 8
end
end
@@ -481,7 +481,7 @@ RSpec.describe SearchService do
end
context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 12
+ it_behaves_like "redaction limits N+1 queries", limit: 13
end
end
@@ -496,7 +496,7 @@ RSpec.describe SearchService do
end
context 'with :with_api_entity_associations' do
- it_behaves_like "redaction limits N+1 queries", limit: 3
+ it_behaves_like "redaction limits N+1 queries", limit: 4
end
end
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 641fc56294a..406ece30bd7 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Snippets::UpdateService do
- describe '#execute' do
+ describe '#execute', :aggregate_failures do
let_it_be(:user) { create(:user) }
let_it_be(:admin) { create :user, admin: true }
let(:visibility_level) { Gitlab::VisibilityLevel::PRIVATE }
@@ -97,40 +97,81 @@ RSpec.describe Snippets::UpdateService do
end
shared_examples 'creates repository and creates file' do
- it 'creates repository' do
- expect(snippet.repository).not_to exist
+ context 'when file_name and content params are used' do
+ it 'creates repository' do
+ expect(snippet.repository).not_to exist
- subject
+ subject
- expect(snippet.repository).to exist
- end
+ expect(snippet.repository).to exist
+ end
- it 'commits the files to the repository' do
- subject
+ it 'commits the files to the repository' do
+ subject
- expect(snippet.blobs.count).to eq 1
+ expect(snippet.blobs.count).to eq 1
- blob = snippet.repository.blob_at('master', options[:file_name])
+ blob = snippet.repository.blob_at('master', options[:file_name])
- expect(blob.data).to eq options[:content]
+ expect(blob.data).to eq options[:content]
+ end
+
+ context 'when the repository creation fails' do
+ before do
+ allow(snippet).to receive(:repository_exists?).and_return(false)
+ end
+
+ it 'raises an error' do
+ expect(subject).to be_error
+ expect(subject.payload[:snippet].errors[:repository].to_sentence).to eq 'Error updating the snippet - Repository could not be created'
+ end
+
+ it 'does not try to commit file' do
+ expect(service).not_to receive(:create_commit)
+
+ subject
+ end
+ end
end
- context 'when the repository creation fails' do
- before do
- allow(snippet).to receive(:repository_exists?).and_return(false)
+ context 'when snippet_actions param is used' do
+ let(:file_path) { 'CHANGELOG' }
+ let(:created_file_path) { 'New file' }
+ let(:content) { 'foobar' }
+ let(:snippet_actions) { [{ action: :move, previous_path: snippet.file_name, file_path: file_path }, { action: :create, file_path: created_file_path, content: content }] }
+ let(:base_opts) do
+ {
+ snippet_actions: snippet_actions
+ }
end
- it 'raise an error' do
- response = subject
+ it 'performs operation without raising errors' do
+ db_content = snippet.content
- expect(response).to be_error
- expect(response.payload[:snippet].errors[:repository].to_sentence).to eq 'Error updating the snippet - Repository could not be created'
+ expect(subject).to be_success
+
+ new_blob = snippet.repository.blob_at('master', file_path)
+ created_file = snippet.repository.blob_at('master', created_file_path)
+
+ expect(new_blob.data).to eq db_content
+ expect(created_file.data).to eq content
end
- it 'does not try to commit file' do
- expect(service).not_to receive(:create_commit)
+ context 'when the repository is not created' do
+ it 'keeps snippet database data' do
+ old_file_name = snippet.file_name
+ old_file_content = snippet.content
- subject
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).to receive(:create_repository_for).and_raise(StandardError)
+ end
+
+ snippet = subject.payload[:snippet]
+
+ expect(subject).to be_error
+ expect(snippet.file_name).to eq(old_file_name)
+ expect(snippet.content).to eq(old_file_content)
+ end
end
end
end
@@ -366,10 +407,9 @@ RSpec.describe Snippets::UpdateService do
let(:snippet_actions) { [{ action: 'invalid_action' }] }
it 'raises a validation error' do
- response = subject
- snippet = response.payload[:snippet]
+ snippet = subject.payload[:snippet]
- expect(response).to be_error
+ expect(subject).to be_error
expect(snippet.errors.full_messages_for(:snippet_actions)).to eq ['Snippet actions have invalid data']
end
end
@@ -377,13 +417,12 @@ RSpec.describe Snippets::UpdateService do
context 'when an error is raised committing the file' do
it 'keeps any snippet modifications' do
expect_next_instance_of(described_class) do |instance|
- expect(instance).to receive(:create_repository_for).and_raise(StandardError)
+ expect(instance).to receive(:create_commit).and_raise(StandardError)
end
- response = subject
- snippet = response.payload[:snippet]
+ snippet = subject.payload[:snippet]
- expect(response).to be_error
+ expect(subject).to be_error
expect(snippet.title).to eq(new_title)
expect(snippet.file_name).to eq(file_path)
expect(snippet.content).to eq(content)
diff --git a/spec/services/static_site_editor/config_service_spec.rb b/spec/services/static_site_editor/config_service_spec.rb
index 5fff4e0af53..fed373828a1 100644
--- a/spec/services/static_site_editor/config_service_spec.rb
+++ b/spec/services/static_site_editor/config_service_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe StaticSiteEditor::ConfigService do
let_it_be(:user) { create(:user) }
# params
- let(:ref) { double(:ref) }
- let(:path) { double(:path) }
+ let(:ref) { 'master' }
+ let(:path) { 'README.md' }
let(:return_url) { double(:return_url) }
# stub data
@@ -42,22 +42,84 @@ RSpec.describe StaticSiteEditor::ConfigService do
allow_next_instance_of(Gitlab::StaticSiteEditor::Config::GeneratedConfig) do |config|
allow(config).to receive(:data) { generated_data }
end
+ end
+
+ context 'when reading file from repo fails with an unexpected error' do
+ let(:unexpected_error) { RuntimeError.new('some unexpected error') }
- allow_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig) do |config|
- allow(config).to receive(:data) { file_data }
+ before do
+ allow(project.repository).to receive(:blob_data_at).and_raise(unexpected_error)
+ end
+
+ it 'returns an error response' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_exception).with(unexpected_error).and_call_original
+ expect { execute }.to raise_error(unexpected_error)
end
end
- it 'returns merged generated data and config file data' do
- expect(execute).to be_success
- expect(execute.payload).to eq(generated: true, file: true)
+ context 'when file is missing' do
+ before do
+ allow(project.repository).to receive(:blob_data_at).and_raise(GRPC::NotFound)
+ expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, '{}') do |config|
+ allow(config).to receive(:valid?) { true }
+ allow(config).to receive(:to_hash_with_defaults) { file_data }
+ end
+ end
+
+ it 'returns default config' do
+ expect(execute).to be_success
+ expect(execute.payload).to eq(generated: true, file: true)
+ end
end
- it 'returns an error if any keys would be overwritten by the merge' do
- generated_data[:duplicate_key] = true
- file_data[:duplicate_key] = true
- expect(execute).to be_error
- expect(execute.message).to match(/duplicate key.*duplicate_key.*found/i)
+ context 'when file is present' do
+ before do
+ allow(project.repository).to receive(:blob_data_at).with(ref, anything) do
+ config_content
+ end
+ end
+
+ context 'and configuration is not valid' do
+ let(:config_content) { 'invalid content' }
+
+ before do
+ expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, config_content) do |config|
+ error = 'error'
+ allow(config).to receive_message_chain('errors.first') { error }
+ allow(config).to receive(:valid?) { false }
+ end
+ end
+
+ it 'returns an error' do
+ expect(execute).to be_error
+ expect(execute.message).to eq('Invalid configuration format')
+ end
+ end
+
+ context 'and configuration is valid' do
+ # NOTE: This has to be a valid config, even though it is mocked, because
+ # `expect_next_instance_of` executes the constructor logic.
+ let(:config_content) { 'static_site_generator: middleman' }
+
+ before do
+ expect_next_instance_of(Gitlab::StaticSiteEditor::Config::FileConfig, config_content) do |config|
+ allow(config).to receive(:valid?) { true }
+ allow(config).to receive(:to_hash_with_defaults) { file_data }
+ end
+ end
+
+ it 'returns merged generated data and config file data' do
+ expect(execute).to be_success
+ expect(execute.payload).to eq(generated: true, file: true)
+ end
+
+ it 'returns an error if any keys would be overwritten by the merge' do
+ generated_data[:duplicate_key] = true
+ file_data[:duplicate_key] = true
+ expect(execute).to be_error
+ expect(execute.message).to match(/duplicate key.*duplicate_key.*found/i)
+ end
+ end
end
end
end
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 47b8621b5c9..42e48b9ad81 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -64,6 +64,18 @@ RSpec.describe SystemNoteService do
end
end
+ describe '.change_issuable_reviewers' do
+ let(:reviewers) { [double, double] }
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:change_issuable_reviewers).with(reviewers)
+ end
+
+ described_class.change_issuable_reviewers(noteable, project, author, reviewers)
+ end
+ end
+
describe '.close_after_error_tracking_resolve' do
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
@@ -741,4 +753,16 @@ RSpec.describe SystemNoteService do
described_class.create_new_alert(alert, monitoring_tool)
end
end
+
+ describe '.change_incident_severity' do
+ let(:incident) { build(:incident) }
+
+ it 'calls IncidentService' do
+ expect_next_instance_of(SystemNotes::IncidentService) do |service|
+ expect(service).to receive(:change_incident_severity)
+ end
+
+ described_class.change_incident_severity(incident, author)
+ end
+ end
end
diff --git a/spec/services/system_notes/incident_service_spec.rb b/spec/services/system_notes/incident_service_spec.rb
new file mode 100644
index 00000000000..ab9b9eb2bd4
--- /dev/null
+++ b/spec/services/system_notes/incident_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::SystemNotes::IncidentService do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:noteable) { create(:incident, project: project) }
+ let_it_be(:issuable_severity) { create(:issuable_severity, issue: noteable, severity: :medium) }
+
+ describe '#change_incident_severity' do
+ subject(:change_severity) { described_class.new(noteable: noteable, project: project, author: author).change_incident_severity }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:error).and_call_original
+ end
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'severity' }
+ end
+
+ IssuableSeverity.severities.keys.each do |severity|
+ context "with #{severity} severity" do
+ before do
+ issuable_severity.update!(severity: severity)
+ end
+
+ it 'has the appropriate message' do
+ severity_label = IssuableSeverity::SEVERITY_LABELS.fetch(severity.to_sym)
+
+ expect(change_severity.note).to eq("changed the severity to **#{severity_label}**")
+ end
+ end
+ end
+
+ context 'when severity is invalid' do
+ let(:invalid_severity) { 'invalid-severity' }
+
+ before do
+ allow(noteable).to receive(:severity).and_return(invalid_severity)
+ end
+
+ it 'does not create system note' do
+ expect { change_severity }.not_to change { noteable.notes.count }
+ end
+
+ it 'writes error to logs' do
+ change_severity
+
+ expect(Gitlab::AppLogger).to have_received(:error).with(
+ message: 'Cannot create a system note for severity change',
+ noteable_class: noteable.class.to_s,
+ noteable_id: noteable.id,
+ severity: invalid_severity
+ )
+ end
+ end
+ end
+end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index fec2a711dc2..e78b00fb67a 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -128,49 +128,76 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
- describe '#change_status' do
- subject { service.change_status(status, source) }
+ describe '#change_issuable_reviewers' do
+ subject { service.change_issuable_reviewers([reviewer]) }
- context 'when resource state event tracking is enabled' do
- let(:status) { 'reopened' }
- let(:source) { nil }
+ let_it_be(:noteable) { create(:merge_request, :simple, source_project: project) }
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:reviewer1) { create(:user) }
+ let_it_be(:reviewer2) { create(:user) }
+ let_it_be(:reviewer3) { create(:user) }
- it 'does not change note count' do
- expect { subject }.not_to change { Note.count }
- end
+ it_behaves_like 'a system note' do
+ let(:action) { 'reviewer' }
end
- context 'with status reopened' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
+ def build_note(old_reviewers, new_reviewers)
+ noteable.reviewers = new_reviewers
+ service.change_issuable_reviewers(old_reviewers).note
+ end
- let(:status) { 'reopened' }
- let(:source) { nil }
+ it 'builds a correct phrase when a reviewer is added to a non-assigned merge request' do
+ expect(build_note([], [reviewer1])).to eq "requested review from @#{reviewer1.username}"
+ end
- it_behaves_like 'a note with overridable created_at'
+ it 'builds a correct phrase when reviewer is removed' do
+ expect(build_note([reviewer], [])).to eq "removed review request for @#{reviewer.username}"
+ end
- it_behaves_like 'a system note' do
- let(:action) { 'opened' }
- end
+ it 'builds a correct phrase when reviewers changed' do
+ expect(build_note([reviewer1], [reviewer2])).to(
+ eq("requested review from @#{reviewer2.username} and removed review request for @#{reviewer1.username}")
+ )
end
- context 'with a source' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
+ it 'builds a correct phrase when three reviewers removed and one added' do
+ expect(build_note([reviewer, reviewer1, reviewer2], [reviewer3])).to(
+ eq("requested review from @#{reviewer3.username} and removed review request for @#{reviewer.username}, @#{reviewer1.username}, and @#{reviewer2.username}")
+ )
+ end
- let(:status) { 'opened' }
- let(:source) { double('commit', gfm_reference: 'commit 123456') }
+ it 'builds a correct phrase when one reviewer is changed from a set' do
+ expect(build_note([reviewer, reviewer1], [reviewer, reviewer2])).to(
+ eq("requested review from @#{reviewer2.username} and removed review request for @#{reviewer1.username}")
+ )
+ end
- it_behaves_like 'a note with overridable created_at'
+ it 'builds a correct phrase when one reviewer removed from a set' do
+ expect(build_note([reviewer, reviewer1, reviewer2], [reviewer, reviewer1])).to(
+ eq( "removed review request for @#{reviewer2.username}")
+ )
+ end
- it 'sets the note text' do
- expect(subject.note).to eq "#{status} via commit 123456"
+ it 'builds a correct phrase when the locale is different' do
+ Gitlab::I18n.with_locale('pt-BR') do
+ expect(build_note([reviewer, reviewer1, reviewer2], [reviewer3])).to(
+ eq("requested review from @#{reviewer3.username} and removed review request for @#{reviewer.username}, @#{reviewer1.username}, and @#{reviewer2.username}")
+ )
end
end
end
+ describe '#change_status' do
+ subject { service.change_status(status, source) }
+
+ let(:status) { 'reopened' }
+ let(:source) { nil }
+
+ it 'creates a resource state event' do
+ expect { subject }.to change { ResourceStateEvent.count }.by(1)
+ end
+ end
+
describe '#change_title' do
let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
@@ -636,67 +663,26 @@ RSpec.describe ::SystemNotes::IssuablesService do
describe '#close_after_error_tracking_resolve' do
subject { service.close_after_error_tracking_resolve }
- context 'when state tracking is enabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: true)
- end
-
- it 'creates the expected state event' do
- subject
-
- event = ResourceStateEvent.last
-
- expect(event.close_after_error_tracking_resolve).to eq(true)
- expect(event.state).to eq('closed')
- end
- end
+ it 'creates the expected state event' do
+ subject
- context 'when state tracking is disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
+ event = ResourceStateEvent.last
- it_behaves_like 'a system note' do
- let(:action) { 'closed' }
- end
-
- it 'creates the expected system note' do
- expect(subject.note)
- .to eq('resolved the corresponding error and closed the issue.')
- end
+ expect(event.close_after_error_tracking_resolve).to eq(true)
+ expect(event.state).to eq('closed')
end
end
describe '#auto_resolve_prometheus_alert' do
subject { service.auto_resolve_prometheus_alert }
- context 'when state tracking is enabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: true)
- end
+ it 'creates the expected state event' do
+ subject
- it 'creates the expected state event' do
- subject
+ event = ResourceStateEvent.last
- event = ResourceStateEvent.last
-
- expect(event.close_auto_resolve_prometheus_alert).to eq(true)
- expect(event.state).to eq('closed')
- end
- end
-
- context 'when state tracking is disabled' do
- before do
- stub_feature_flags(track_resource_state_change_events: false)
- end
-
- it_behaves_like 'a system note' do
- let(:action) { 'closed' }
- end
-
- it 'creates the expected system note' do
- expect(subject.note).to eq('automatically closed this issue because the alert resolved.')
- end
+ expect(event.close_auto_resolve_prometheus_alert).to eq(true)
+ expect(event.state).to eq('closed')
end
end
end
diff --git a/spec/services/users/build_service_spec.rb b/spec/services/users/build_service_spec.rb
index c14fdb35bfa..b445b5b81b1 100644
--- a/spec/services/users/build_service_spec.rb
+++ b/spec/services/users/build_service_spec.rb
@@ -16,6 +16,10 @@ RSpec.describe Users::BuildService do
expect(service.execute).to be_valid
end
+ it 'sets the created_by_id' do
+ expect(service.execute.created_by_id).to eq(admin_user.id)
+ end
+
context 'calls the UpdateCanonicalEmailService' do
specify do
expect(Users::UpdateCanonicalEmailService).to receive(:new).and_call_original
@@ -128,6 +132,16 @@ RSpec.describe Users::BuildService do
it 'raises AccessDeniedError exception' do
expect { service.execute }.to raise_error Gitlab::Access::AccessDeniedError
end
+
+ context 'when authorization is skipped' do
+ subject(:built_user) { service.execute(skip_authorization: true) }
+
+ it { is_expected.to be_valid }
+
+ it 'sets the created_by_id' do
+ expect(built_user.created_by_id).to eq(user.id)
+ end
+ end
end
context 'with nil user' do
diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb
index 17b76205d9e..617a45ae449 100644
--- a/spec/simplecov_env.rb
+++ b/spec/simplecov_env.rb
@@ -2,7 +2,6 @@
require 'simplecov'
require 'simplecov-cobertura'
-require 'active_support/core_ext/numeric/time'
require_relative '../lib/gitlab/utils'
module SimpleCovEnv
@@ -75,7 +74,7 @@ module SimpleCovEnv
add_group 'Libraries', %w[/lib /ee/lib]
add_group 'Tooling', %w[/haml_lint /rubocop /tooling]
- merge_timeout 365.days
+ merge_timeout 365 * 24 * 3600
end
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 11a83bd9501..2962225859d 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -66,7 +66,11 @@ RSpec.configure do |config|
config.display_try_failure_messages = true
config.infer_spec_type_from_file_location!
- config.full_backtrace = !!ENV['CI']
+
+ # Add :full_backtrace tag to an example if full_backtrace output is desired
+ config.before(:each, full_backtrace: true) do |example|
+ config.full_backtrace = true
+ end
unless ENV['CI']
# Re-run failures locally with `--only-failures`
@@ -124,6 +128,7 @@ RSpec.configure do |config|
config.include LoginHelpers, type: :feature
config.include SearchHelpers, type: :feature
config.include WaitHelpers, type: :feature
+ config.include WaitForRequests, type: :feature
config.include EmailHelpers, :mailer, type: :mailer
config.include Warden::Test::Helpers, type: :request
config.include Gitlab::Routing, type: :routing
@@ -133,7 +138,6 @@ RSpec.configure do |config|
config.include InputHelper, :js
config.include SelectionHelper, :js
config.include InspectRequests, :js
- config.include WaitForRequests, :js
config.include LiveDebugger, :js
config.include MigrationsHelpers, :migration
config.include RedisHelpers
@@ -233,6 +237,12 @@ RSpec.configure do |config|
# expect(Gitlab::Git::KeepAround).to receive(:execute).and_call_original
allow(Gitlab::Git::KeepAround).to receive(:execute)
+ # Stub this call because it is an expensive operation
+ # It can be re-enabled for specific tests via:
+ #
+ # expect(Gitlab::JobWaiter).to receive(:wait).and_call_original
+ allow_any_instance_of(Gitlab::JobWaiter).to receive(:wait)
+
Gitlab::ProcessMemoryCache.cache_backend.clear
Sidekiq::Worker.clear_all
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 66fce4fddf1..27c360ee39a 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -123,6 +123,10 @@ RSpec.configure do |config|
port: session.server.port,
protocol: 'http')
+ # CSRF protection is disabled by default. We only enable it for JS specs because some forms
+ # require JavaScript to set the CSRF token.
+ allow_any_instance_of(ActionController::Base).to receive(:protect_against_forgery?).and_return(true)
+
# reset window size between tests
unless session.current_window.size == CAPYBARA_WINDOW_SIZE
begin
diff --git a/spec/support/counter_attribute.rb b/spec/support/counter_attribute.rb
index ea71b25b4c0..8bd40b72dcf 100644
--- a/spec/support/counter_attribute.rb
+++ b/spec/support/counter_attribute.rb
@@ -9,6 +9,12 @@ RSpec.configure do |config|
counter_attribute :build_artifacts_size
counter_attribute :commit_count
+
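+ # Expose a flag so specs can assert that the after-flush callback ran.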
+ attr_accessor :flushed
+
+ counter_attribute_after_flush do |subject|
+ subject.flushed = true
+ end
end
end
end
diff --git a/spec/support/factory_bot.rb b/spec/support/factory_bot.rb
index a86161bfded..c9d372993b5 100644
--- a/spec/support/factory_bot.rb
+++ b/spec/support/factory_bot.rb
@@ -3,7 +3,3 @@
FactoryBot::SyntaxRunner.class_eval do
include RSpec::Mocks::ExampleMethods
end
-
-# Use FactoryBot 4.x behavior:
-# https://github.com/thoughtbot/factory_bot/blob/master/GETTING_STARTED.md#associations
-FactoryBot.use_parent_strategy = false
diff --git a/spec/support/google_api/cloud_platform_helpers.rb b/spec/support/google_api/cloud_platform_helpers.rb
index 38ffca8c5ae..286f3c03357 100644
--- a/spec/support/google_api/cloud_platform_helpers.rb
+++ b/spec/support/google_api/cloud_platform_helpers.rb
@@ -24,7 +24,7 @@ module GoogleApi
def stub_cloud_platform_get_zone_cluster(project_id, zone, cluster_id, **options)
WebMock.stub_request(:get, cloud_platform_get_zone_cluster_url(project_id, zone, cluster_id))
- .to_return(cloud_platform_response(cloud_platform_cluster_body(options)))
+ .to_return(cloud_platform_response(cloud_platform_cluster_body(**options)))
end
def stub_cloud_platform_get_zone_cluster_error(project_id, zone, cluster_id)
diff --git a/spec/support/helpers/api_internal_base_helpers.rb b/spec/support/helpers/api_internal_base_helpers.rb
new file mode 100644
index 00000000000..058d07c7a1d
--- /dev/null
+++ b/spec/support/helpers/api_internal_base_helpers.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
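+# Request-spec helpers for the POST /internal/allowed endpoint: they build the
+# params for git-upload-pack (pull), git-receive-pack (push) and
+# git-upload-archive (archive) checks against projects, wikis and snippets.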
+module APIInternalBaseHelpers
+ def gl_repository_for(container)
+ case container
+ when ProjectWiki
+ Gitlab::GlRepository::WIKI.identifier_for_container(container.project)
+ when Project
+ Gitlab::GlRepository::PROJECT.identifier_for_container(container)
+ when Snippet
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(container)
+ else
+ nil
+ end
+ end
+
+ def full_path_for(container)
+ case container
+ when PersonalSnippet
+ "snippets/#{container.id}"
+ when ProjectSnippet
+ "#{container.project.full_path}/snippets/#{container.id}"
+ else
+ container.full_path
+ end
+ end
+
+ def pull(key, container, protocol = 'ssh')
+ post(
+ api("/internal/allowed"),
+ params: {
+ key_id: key.id,
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ action: 'git-upload-pack',
+ secret_token: secret_token,
+ protocol: protocol
+ }
+ )
+ end
+
+ def push(key, container, protocol = 'ssh', env: nil, changes: nil)
+ push_with_path(key,
+ full_path: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ protocol: protocol,
+ env: env,
+ changes: changes)
+ end
+
+ def push_with_path(key, full_path:, gl_repository: nil, protocol: 'ssh', env: nil, changes: nil)
+ changes ||= 'd14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master'
+
+ params = {
+ changes: changes,
+ key_id: key.id,
+ project: full_path,
+ action: 'git-receive-pack',
+ secret_token: secret_token,
+ protocol: protocol,
+ env: env
+ }
+ params[:gl_repository] = gl_repository if gl_repository
+
+ post(
+ api("/internal/allowed"),
+ params: params
+ )
+ end
+
+ def archive(key, container)
+ post(
+ api("/internal/allowed"),
+ params: {
+ ref: 'master',
+ key_id: key.id,
+ project: full_path_for(container),
+ gl_repository: gl_repository_for(container),
+ action: 'git-upload-archive',
+ secret_token: secret_token,
+ protocol: 'ssh'
+ }
+ )
+ end
+end
diff --git a/spec/support/helpers/features/canonical_link_helpers.rb b/spec/support/helpers/features/canonical_link_helpers.rb
new file mode 100644
index 00000000000..da3a28f1cb2
--- /dev/null
+++ b/spec/support/helpers/features/canonical_link_helpers.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+# These helpers allow you to assert on canonical links.
+#
+# Usage:
+# describe "..." do
+# include Spec::Support::Helpers::Features::CanonicalLinkHelpers
+# ...
+#
+# expect(page).to have_canonical_link(url)
+#
+module Spec
+ module Support
+ module Helpers
+ module Features
+ module CanonicalLinkHelpers
+ def have_canonical_link(url)
+ have_xpath("//link[@rel=\"canonical\" and @href=\"#{url}\"]", visible: false)
+ end
+
+ def have_any_canonical_links
+ have_xpath('//link[@rel="canonical"]', visible: false)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/helpers/git_http_helpers.rb b/spec/support/helpers/git_http_helpers.rb
index de8bb9ac8e3..c9c1c4dcfc9 100644
--- a/spec/support/helpers/git_http_helpers.rb
+++ b/spec/support/helpers/git_http_helpers.rb
@@ -5,45 +5,45 @@ require_relative 'workhorse_helpers'
module GitHttpHelpers
include WorkhorseHelpers
- def clone_get(project, options = {})
+ def clone_get(project, **options)
get "/#{project}/info/refs", params: { service: 'git-upload-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
- def clone_post(project, options = {})
+ def clone_post(project, **options)
post "/#{project}/git-upload-pack", headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
- def push_get(project, options = {})
+ def push_get(project, **options)
get "/#{project}/info/refs", params: { service: 'git-receive-pack' }, headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
- def push_post(project, options = {})
+ def push_post(project, **options)
post "/#{project}/git-receive-pack", headers: auth_env(*options.values_at(:user, :password, :spnego_request_token))
end
def download(project, user: nil, password: nil, spnego_request_token: nil)
- args = [project, { user: user, password: password, spnego_request_token: spnego_request_token }]
+ args = { user: user, password: password, spnego_request_token: spnego_request_token }
- clone_get(*args)
+ clone_get(project, **args)
yield response
- clone_post(*args)
+ clone_post(project, **args)
yield response
end
def upload(project, user: nil, password: nil, spnego_request_token: nil)
- args = [project, { user: user, password: password, spnego_request_token: spnego_request_token }]
+ args = { user: user, password: password, spnego_request_token: spnego_request_token }
- push_get(*args)
+ push_get(project, **args)
yield response
- push_post(*args)
+ push_post(project, **args)
yield response
end
- def download_or_upload(*args, &block)
- download(*args, &block)
- upload(*args, &block)
+ def download_or_upload(project, **args, &block)
+ download(project, **args, &block)
+ upload(project, **args, &block)
end
def auth_env(user, password, spnego_request_token)
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 5635ba3df05..e754a24417c 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -234,7 +234,8 @@ module GraphqlHelpers
end
def post_graphql(query, current_user: nil, variables: nil, headers: {})
- post api('/', current_user, version: 'graphql'), params: { query: query, variables: variables }, headers: headers
+ params = { query: query, variables: variables&.to_json }
+ post api('/', current_user, version: 'graphql'), params: params, headers: headers
end
def post_graphql_mutation(mutation, current_user: nil)
diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb
index 8882f31e2f4..16d2e84cc0f 100644
--- a/spec/support/helpers/kubernetes_helpers.rb
+++ b/spec/support/helpers/kubernetes_helpers.rb
@@ -153,7 +153,7 @@ module KubernetesHelpers
options[:name] ||= "kubetest"
options[:domain] ||= "example.com"
- options[:response] ||= kube_response(kube_knative_services_body(options))
+ options[:response] ||= kube_response(kube_knative_services_body(**options))
stub_kubeclient_discover(service.api_url)
@@ -167,7 +167,7 @@ module KubernetesHelpers
options[:namespace] ||= "default"
WebMock.stub_request(:get, api_url + "/api/v1/namespaces/#{options[:namespace]}/secrets/#{options[:metadata_name]}")
- .to_return(kube_response(kube_v1_secret_body(options)))
+ .to_return(kube_response(kube_v1_secret_body(**options)))
end
def stub_kubeclient_get_secret_error(api_url, name, namespace: 'default', status: 404)
@@ -517,7 +517,7 @@ module KubernetesHelpers
def kube_knative_services_body(**options)
{
"kind" => "List",
- "items" => [knative_09_service(options)]
+ "items" => [knative_09_service(**options)]
}
end
@@ -604,7 +604,7 @@ module KubernetesHelpers
}
end
- def kube_deployment(name: "kube-deployment", environment_slug: "production", project_slug: "project-path-slug", track: nil)
+ def kube_deployment(name: "kube-deployment", environment_slug: "production", project_slug: "project-path-slug", track: nil, replicas: 3)
{
"metadata" => {
"name" => name,
@@ -617,7 +617,7 @@ module KubernetesHelpers
"track" => track
}.compact
},
- "spec" => { "replicas" => 3 },
+ "spec" => { "replicas" => replicas },
"status" => {
"observedGeneration" => 4
}
diff --git a/spec/support/helpers/search_helpers.rb b/spec/support/helpers/search_helpers.rb
index db6e47459e9..328f272724a 100644
--- a/spec/support/helpers/search_helpers.rb
+++ b/spec/support/helpers/search_helpers.rb
@@ -1,6 +1,14 @@
# frozen_string_literal: true
module SearchHelpers
+ def fill_in_search(text)
+ page.within('.search-input-wrap') do
+ fill_in('search', with: text)
+ end
+
+ wait_for_all_requests
+ end
+
def submit_search(query, scope: nil)
page.within('.search-form, .search-page-form') do
field = find_field('search')
@@ -11,6 +19,8 @@ module SearchHelpers
else
click_button('Search')
end
+
+ wait_for_all_requests
end
end
diff --git a/spec/support/helpers/stub_experiments.rb b/spec/support/helpers/stub_experiments.rb
index ff3b02dc3f6..7a6154d5ef9 100644
--- a/spec/support/helpers/stub_experiments.rb
+++ b/spec/support/helpers/stub_experiments.rb
@@ -22,10 +22,10 @@ module StubExperiments
# Examples
# - `stub_experiment_for_user(signup_flow: false)` ... Disable `signup_flow` experiment for user.
def stub_experiment_for_user(experiments)
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?).and_call_original
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?).and_call_original
experiments.each do |experiment_key, enabled|
- allow(Gitlab::Experimentation).to receive(:enabled_for_user?).with(experiment_key, anything) { enabled }
+ allow(Gitlab::Experimentation).to receive(:enabled_for_value?).with(experiment_key, anything) { enabled }
end
end
end
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index 792a1c21c31..7f30a2a70cd 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -62,4 +62,8 @@ module StubFeatureFlags
StubFeatureGate.new(object)
end
+
+ def skip_feature_flags_yaml_validation
+ allow(Feature::Definition).to receive(:valid_usage!)
+ end
end
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 476b7d34ee5..dba3d2b137e 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -82,13 +82,27 @@ module StubObjectStorage
**params)
end
- def stub_terraform_state_object_storage(uploader = described_class, **params)
+ def stub_terraform_state_object_storage(**params)
stub_object_storage_uploader(config: Gitlab.config.terraform_state.object_store,
- uploader: uploader,
+ uploader: Terraform::VersionedStateUploader,
+ remote_directory: 'terraform',
+ **params)
+ end
+
+ def stub_terraform_state_version_object_storage(**params)
+ stub_object_storage_uploader(config: Gitlab.config.terraform_state.object_store,
+ uploader: Terraform::StateUploader,
remote_directory: 'terraform',
**params)
end
+ def stub_pages_object_storage(uploader = described_class, **params)
+ stub_object_storage_uploader(config: Gitlab.config.pages.object_store,
+ uploader: uploader,
+ remote_directory: 'pages',
+ **params)
+ end
+
def stub_object_storage_multipart_init(endpoint, upload_id = "upload_id")
stub_request(:post, %r{\A#{endpoint}tmp/uploads/[a-z0-9-]*\?uploads\z})
.to_return status: 200, body: <<-EOS.strip_heredoc
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index d92fcdc2d4a..17e806d21d9 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -99,6 +99,7 @@ module UsageDataHelpers
projects_with_error_tracking_enabled
projects_with_alerts_service_enabled
projects_with_prometheus_alerts
+ projects_with_tracing_enabled
projects_with_expiration_policy_enabled
projects_with_expiration_policy_disabled
projects_with_expiration_policy_enabled_with_keep_n_unset
@@ -133,6 +134,7 @@ module UsageDataHelpers
todos
uploads
web_hooks
+ user_preferences_user_gitpod_enabled
).push(*SMAU_KEYS)
USAGE_DATA_KEYS = %i(
@@ -228,7 +230,7 @@ module UsageDataHelpers
receive_matchers.each { |m| expect(prometheus_client).to m }
end
- def for_defined_days_back(days: [29, 2])
+ def for_defined_days_back(days: [31, 3])
days.each do |n|
Timecop.travel(n.days.ago) do
yield
diff --git a/spec/support/helpers/wait_for_requests.rb b/spec/support/helpers/wait_for_requests.rb
index 2cfd47634ca..43060e571a9 100644
--- a/spec/support/helpers/wait_for_requests.rb
+++ b/spec/support/helpers/wait_for_requests.rb
@@ -48,17 +48,10 @@ module WaitForRequests
def finished_all_js_requests?
return true unless javascript_test?
- finished_all_ajax_requests? &&
- finished_all_axios_requests?
- end
-
- def finished_all_axios_requests?
- Capybara.page.evaluate_script('window.pendingRequests || 0').zero? # rubocop:disable Style/NumericPredicate
+ finished_all_ajax_requests?
end
def finished_all_ajax_requests?
- return true if Capybara.page.evaluate_script('typeof jQuery === "undefined"')
-
- Capybara.page.evaluate_script('jQuery.active').zero? # rubocop:disable Style/NumericPredicate
+ Capybara.page.evaluate_script('window.pendingRequests || window.pendingRailsUJSRequests || 0').zero? # rubocop:disable Style/NumericPredicate
end
end
diff --git a/spec/support/helpers/wiki_helpers.rb b/spec/support/helpers/wiki_helpers.rb
index e59c6bde264..8873a90579d 100644
--- a/spec/support/helpers/wiki_helpers.rb
+++ b/spec/support/helpers/wiki_helpers.rb
@@ -13,16 +13,16 @@ module WikiHelpers
find('.svg-content .js-lazy-loaded') if example.nil? || example.metadata.key?(:js)
end
- def upload_file_to_wiki(container, user, file_name)
- opts = {
+ def upload_file_to_wiki(wiki, user, file_name)
+ params = {
file_name: file_name,
file_content: File.read(expand_fixture_path(file_name))
}
::Wikis::CreateAttachmentService.new(
- container: container,
+ container: wiki.container,
current_user: user,
- params: opts
- ).execute[:result][:file_path]
+ params: params
+ ).execute.dig(:result, :file_path)
end
end
diff --git a/spec/support/matchers/be_sorted.rb b/spec/support/matchers/be_sorted.rb
new file mode 100644
index 00000000000..1455060fe71
--- /dev/null
+++ b/spec/support/matchers/be_sorted.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+# Assert that a collection is sorted.
+#
+# By default, this checks that the collection is sorted in ascending
+# order, but you can check sorting by a specific field and order by
+# passing them, e.g.:
+#
+# ```
+# expect(collection).to be_sorted(:field, :desc)
+# ```
+RSpec::Matchers.define :be_sorted do |by, order = :asc|
+ match do |actual|
+ next true unless actual.present? # an empty collection is sorted
+
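+ # Sort a copy by the given field and order, then compare it with the original.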
+ actual
+ .then { |collection| by ? collection.sort_by(&by) : collection.sort }
+ .then { |sorted_collection| order.to_sym == :desc ? sorted_collection.reverse : sorted_collection }
+ .then { |sorted_collection| sorted_collection == actual }
+ end
+end
diff --git a/spec/support/migrations_helpers/schema_version_finder.rb b/spec/support/migrations_helpers/schema_version_finder.rb
new file mode 100644
index 00000000000..b677db7ea26
--- /dev/null
+++ b/spec/support/migrations_helpers/schema_version_finder.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# Sometimes data migration specs require adding invalid test data in order to test
+# the migration (e.g. adding a row with a null foreign key). Certain db migrations that
+# add constraints (e.g. a NOT NULL constraint) prevent such invalid records from being
+# added, so the data migration cannot be tested. In those cases, SchemaVersionFinder can
+# be used to find and use the schema prior to the specified migration.
+#
+# @example
+# RSpec.describe CleanupThings, :migration, schema: MigrationHelpers::SchemaVersionFinder.migration_prior(AddNotNullConstraint) do ...
+#
+# SchemaVersionFinder returns the schema version prior to the one specified, which allows
+# invalid records to be added to the database so the data migration can be tested properly.
+module MigrationHelpers
+ class SchemaVersionFinder
+ def self.migrations_paths
+ ActiveRecord::Migrator.migrations_paths
+ end
+
+ def self.migration_context
+ ActiveRecord::MigrationContext.new(migrations_paths, ActiveRecord::SchemaMigration)
+ end
+
+ def self.migrations
+ migration_context.migrations
+ end
+
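+ # Returns the version of the migration immediately preceding the given migration class.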
+ def self.migration_prior(migration_klass)
+ migrations.each_cons(2) do |previous, migration|
+ break previous.version if migration.name == migration_klass.name
+ end
+ end
+ end
+end
diff --git a/spec/support/models/merge_request_without_merge_request_diff.rb b/spec/support/models/merge_request_without_merge_request_diff.rb
new file mode 100644
index 00000000000..5cdf1feb7a5
--- /dev/null
+++ b/spec/support/models/merge_request_without_merge_request_diff.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
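+# Test-only MergeRequest subclass that skips creating a merge request diff.
+# STI is disabled (the inheritance column points at a non-existent column) so
+# records still load from the merge_requests table without type filtering.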
+class MergeRequestWithoutMergeRequestDiff < ::MergeRequest
+ self.inheritance_column = :_type_disabled
+
+ def ensure_merge_request_diff; end
+end
diff --git a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
index e3c1d0afa53..bfb719fd840 100644
--- a/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
+++ b/spec/support/shared_contexts/cache_allowed_users_in_namespace_shared_context.rb
@@ -25,7 +25,7 @@ RSpec.shared_examples 'allowed user IDs are cached' do
expect(described_class.l1_cache_backend).to receive(:fetch).and_call_original
expect(described_class.l2_cache_backend).to receive(:fetch).and_call_original
expect(subject).to be_truthy
- end.not_to exceed_query_limit(2)
+ end.not_to exceed_query_limit(3)
end
end
end
diff --git a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
index 113252a6ab5..84910d0dfe4 100644
--- a/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
+++ b/spec/support/shared_examples/boards/multiple_issue_boards_shared_examples.rb
@@ -85,7 +85,7 @@ RSpec.shared_examples 'multiple issue boards' do
wait_for_requests
- expect(page).to have_selector('.board', count: 3)
+ expect(page).to have_selector('.board', count: 5)
in_boards_switcher_dropdown do
click_link board.name
@@ -93,7 +93,7 @@ RSpec.shared_examples 'multiple issue boards' do
wait_for_requests
- expect(page).to have_selector('.board', count: 2)
+ expect(page).to have_selector('.board', count: 4)
end
it 'maintains sidebar state over board switch' do
diff --git a/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb b/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
index 7f26155f9d6..3f147f942ba 100644
--- a/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/known_sign_in_shared_examples.rb
@@ -59,7 +59,7 @@ RSpec.shared_examples 'known sign in' do
it 'notifies the user when the cookie is expired' do
stub_cookie
- Timecop.freeze((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
+ travel_to((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
diff --git a/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb b/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
index 925c45005f0..2d35b1681ea 100644
--- a/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/milestone_tabs_shared_examples.rb
@@ -2,9 +2,28 @@
RSpec.shared_examples 'milestone tabs' do
def go(path, extra_params = {})
- params = { namespace_id: project.namespace.to_param, project_id: project, id: milestone.iid }
+ get path, params: request_params.merge(extra_params)
+ end
+
+ describe '#issues' do
+ context 'as html' do
+ before do
+ go(:issues, format: 'html')
+ end
- get path, params: params.merge(extra_params)
+ it 'redirects to milestone#show' do
+ expect(response).to redirect_to(milestone_path)
+ end
+ end
+
+ context 'as json' do
+ it 'renders the issues tab template to a string' do
+ go(:issues, format: 'json')
+
+ expect(response).to render_template('shared/milestones/_issues_tab')
+ expect(json_response).to have_key('html')
+ end
+ end
end
describe '#merge_requests' do
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 4ca400dd87b..1cf31c47df8 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -226,7 +226,7 @@ RSpec.shared_examples 'wiki controller actions' do
where(:file_name) { ['dk.png', 'unsanitized.svg', 'git-cheat-sheet.pdf'] }
with_them do
- let(:id) { upload_file_to_wiki(container, user, file_name) }
+ let(:id) { upload_file_to_wiki(wiki, user, file_name) }
it 'delivers the file with the correct headers' do
subject
diff --git a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
index c9910487798..218f1afce98 100644
--- a/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/features/editable_merge_request_shared_examples.rb
@@ -11,6 +11,15 @@ RSpec.shared_examples 'an editable merge request' do
expect(page).to have_content user.name
end
+ find('.js-reviewer-search').click
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+ expect(find('input[name="merge_request[reviewer_ids][]"]', visible: false).value).to match(user.id.to_s)
+ page.within '.js-reviewer-search' do
+ expect(page).to have_content user.name
+ end
+
click_button 'Milestone'
page.within '.issue-milestone' do
click_link milestone.title
@@ -38,6 +47,10 @@ RSpec.shared_examples 'an editable merge request' do
expect(page).to have_content user.name
end
+ page.within '.reviewer' do
+ expect(page).to have_content user.name
+ end
+
page.within '.milestone' do
expect(page).to have_content milestone.title
end
@@ -124,16 +137,3 @@ end
def get_textarea_height
page.evaluate_script('document.getElementById("merge_request_description").offsetHeight')
end
-
-RSpec.shared_examples 'an editable merge request with reviewers' do
- it 'updates merge request', :js do
- find('.js-reviewer-search').click
- page.within '.dropdown-menu-user' do
- click_link user.name
- end
- expect(find('input[name="merge_request[reviewer_ids][]"]', visible: false).value).to match(user.id.to_s)
- page.within '.js-reviewer-search' do
- expect(page).to have_content user.name
- end
- end
-end
diff --git a/spec/support/shared_examples/features/navbar_shared_examples.rb b/spec/support/shared_examples/features/navbar_shared_examples.rb
index 91a4048fa7c..c768e95c45a 100644
--- a/spec/support/shared_examples/features/navbar_shared_examples.rb
+++ b/spec/support/shared_examples/features/navbar_shared_examples.rb
@@ -3,12 +3,14 @@
RSpec.shared_examples 'verified navigation bar' do
let(:expected_structure) do
structure.compact!
- structure.each { |s| s[:nav_sub_items].compact! }
+ structure.each { |s| s[:nav_sub_items]&.compact! }
structure
end
it 'renders correctly' do
current_structure = page.all('.sidebar-top-level-items > li', class: ['!hidden']).map do |item|
+ next if item.find_all('a').empty?
+
nav_item = item.find_all('a').first.text.gsub(/\s+\d+$/, '') # remove counts at the end
nav_sub_items = item.all('.sidebar-sub-level-items > li', class: ['!fly-out-top-item']).map do |list_item|
@@ -16,7 +18,7 @@ RSpec.shared_examples 'verified navigation bar' do
end
{ nav_item: nav_item, nav_sub_items: nav_sub_items }
- end
+ end.compact
expect(current_structure).to eq(expected_structure)
end
diff --git a/spec/support/shared_examples/features/packages_shared_examples.rb b/spec/support/shared_examples/features/packages_shared_examples.rb
index f201421e827..4d2e13aa5bc 100644
--- a/spec/support/shared_examples/features/packages_shared_examples.rb
+++ b/spec/support/shared_examples/features/packages_shared_examples.rb
@@ -84,11 +84,11 @@ RSpec.shared_examples 'shared package sorting' do
let(:packages) { [package_two, package_one] }
end
- it_behaves_like 'correctly sorted packages list', 'Created' do
+ it_behaves_like 'correctly sorted packages list', 'Published' do
let(:packages) { [package_two, package_one] }
end
- it_behaves_like 'correctly sorted packages list', 'Created', ascending: true do
+ it_behaves_like 'correctly sorted packages list', 'Published', ascending: true do
let(:packages) { [package_one, package_two] }
end
end
diff --git a/spec/support/shared_examples/features/page_description_shared_examples.rb b/spec/support/shared_examples/features/page_description_shared_examples.rb
new file mode 100644
index 00000000000..81653220b4c
--- /dev/null
+++ b/spec/support/shared_examples/features/page_description_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'page meta description' do |expected_description|
+ it 'renders the page with description, og:description, and twitter:description meta tags that contain a plain-text version of the markdown', :aggregate_failures do
+ %w(name='description' property='og:description' property='twitter:description').each do |selector|
+ expect(page).to have_selector("meta[#{selector}][content='#{expected_description}']", visible: false)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
new file mode 100644
index 00000000000..a2d2143271c
--- /dev/null
+++ b/spec/support/shared_examples/features/protected_branches_with_deploy_keys_examples.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'when the deploy_keys_on_protected_branches FF is turned on' do
+ before do
+ stub_feature_flags(deploy_keys_on_protected_branches: true)
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ let(:dropdown_sections_minus_deploy_keys) { all_dropdown_sections - ['Deploy Keys'] }
+
+ context 'when deploy keys are enabled for this project' do
+ let!(:deploy_key_1) { create(:deploy_key, title: 'title 1', projects: [project]) }
+ let!(:deploy_key_2) { create(:deploy_key, title: 'title 2', projects: [project]) }
+
+ context 'when only one deploy key can push' do
+ before do
+ deploy_key_1.deploy_keys_projects.first.update!(can_push: true)
+ end
+
+ it "shows all dropdown sections in the 'Allowed to push' main dropdown, with only one deploy key" do
+ visit project_protected_branches_path(project)
+
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ within('.qa-allowed-to-push-dropdown') do
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*all_dropdown_sections)
+ expect(page).to have_content('title 1')
+ expect(page).not_to have_content('title 2')
+ end
+ end
+
+ it "shows all sections but not deploy keys in the 'Allowed to merge' main dropdown" do
+ visit project_protected_branches_path(project)
+
+ find(".js-allowed-to-merge").click
+ wait_for_requests
+
+ within('.qa-allowed-to-merge-dropdown') do
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
+ end
+ end
+
+ it "shows all sections in the 'Allowed to push' update dropdown" do
+ create(:protected_branch, :no_one_can_push, project: project, name: 'master')
+
+ visit project_protected_branches_path(project)
+
+ within(".js-protected-branch-edit-form") do
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*all_dropdown_sections)
+ end
+ end
+ end
+
+ context 'when no deploy key can push' do
+ it "just shows all sections but not deploy keys in the 'Allowed to push' dropdown" do
+ visit project_protected_branches_path(project)
+
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ within('.qa-allowed-to-push-dropdown') do
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
+ end
+ end
+
+ it "just shows all sections but not deploy keys in the 'Allowed to push' update dropdown" do
+ create(:protected_branch, :no_one_can_push, project: project, name: 'master')
+
+ visit project_protected_branches_path(project)
+
+ within(".js-protected-branch-edit-form") do
+ find(".js-allowed-to-push").click
+ wait_for_requests
+
+ dropdown_headers = page.all('.dropdown-header').map(&:text)
+
+ expect(dropdown_headers).to contain_exactly(*dropdown_sections_minus_deploy_keys)
+ end
+ end
+ end
+ end
+end
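
A sketch of the context this shared example expects from its caller, namely a user, a project, and the full list of dropdown sections; the section names below are placeholders.

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Protected Branches', :js do
  let(:user) { create(:user) }
  let(:project) { create(:project, :repository) }
  # Placeholder section names; the real list comes from the page under test.
  let(:all_dropdown_sections) { ['Roles', 'Users', 'Deploy Keys'] }

  it_behaves_like 'when the deploy_keys_on_protected_branches FF is turned on'
end
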
diff --git a/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb b/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
index d30e8241da0..0ef1ccdfe57 100644
--- a/spec/support/shared_examples/features/wiki_file_attachments_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
@@ -1,14 +1,12 @@
# frozen_string_literal: true
# Requires a context containing:
-# project
+# wiki
RSpec.shared_examples 'wiki file attachments' do
include DropzoneHelper
context 'uploading attachments', :js do
- let(:wiki) { project.wiki }
-
def attach_with_dropzone(wait = false)
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, wait)
end
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
new file mode 100644
index 00000000000..44d82d2e753
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -0,0 +1,245 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User creates wiki page' do
+ include WikiHelpers
+
+ before do
+ sign_in(user)
+ end
+
+ context "when wiki is empty" do
+ before do |example|
+ visit wiki_path(wiki)
+
+ wait_for_svg_to_be_loaded(example)
+
+ click_link "Create your first page"
+ end
+
+ it "shows validation error message" do
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "")
+
+ click_on("Create page")
+ end
+
+ expect(page).to have_content("The form contains the following error:").and have_content("Content can't be blank")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "[link test](test)")
+
+ click_on("Create page")
+ end
+
+ expect(page).to have_content("Home").and have_content("link test")
+
+ click_link("link test")
+
+ expect(page).to have_content("Create New Page")
+ end
+
+ it "shows non-escaped link in the pages list" do
+ fill_in(:wiki_title, with: "one/two/three-test")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "wiki content")
+
+ click_on("Create page")
+ end
+
+ expect(current_path).to include("one/two/three-test")
+ expect(page).to have_link(href: wiki_page_path(wiki, 'one/two/three-test'))
+ end
+
+ it "has `Create home` as a commit message", :js do
+ wait_for_requests
+
+ expect(page).to have_field("wiki[message]", with: "Create home")
+ end
+
+ it "creates a page from the home page" do
+ fill_in(:wiki_content, with: "[test](test)\n[GitLab API doc](api)\n[Rake tasks](raketasks)\n# Wiki header\n")
+ fill_in(:wiki_message, with: "Adding links to wiki")
+
+ page.within(".wiki-form") do
+ click_button("Create page")
+ end
+
+ expect(current_path).to eq(wiki_page_path(wiki, "home"))
+ expect(page).to have_content("test GitLab API doc Rake tasks Wiki header")
+ .and have_content("Home")
+ .and have_content("Last edited by #{user.name}")
+ .and have_header_with_correct_id_and_link(1, "Wiki header", "wiki-header")
+
+ click_link("test")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "test"))
+
+ page.within(:css, ".nav-text") do
+ expect(page).to have_content("Create New Page")
+ end
+
+ click_link("Home")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "home"))
+
+ click_link("GitLab API")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "api"))
+
+ page.within(:css, ".nav-text") do
+ expect(page).to have_content("Create")
+ end
+
+ click_link("Home")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "home"))
+
+ click_link("Rake tasks")
+
+ expect(current_path).to eq(wiki_page_path(wiki, "raketasks"))
+
+ page.within(:css, ".nav-text") do
+ expect(page).to have_content("Create")
+ end
+ end
+
+ it "creates ASCII wiki with LaTeX blocks", :js do
+ stub_application_setting(plantuml_url: "http://localhost", plantuml_enabled: true)
+
+ ascii_content = <<~MD
+ :stem: latexmath
+
+ [stem]
+ ++++
+ \\sqrt{4} = 2
+ ++++
+
+ another part
+
+ [latexmath]
+ ++++
+ \\beta_x \\gamma
+ ++++
+
+ stem:[2+2] is 4
+ MD
+
+ find("#wiki_format option[value=asciidoc]").select_option
+
+ fill_in(:wiki_content, with: ascii_content)
+
+ page.within(".wiki-form") do
+ click_button("Create page")
+ end
+
+ page.within ".md" do
+ expect(page).to have_selector(".katex", count: 3).and have_content("2+2 is 4")
+ end
+ end
+
+ it 'creates a wiki page with Org markup', :aggregate_failures do
+ org_content = <<~ORG
+ * Heading
+ ** Subheading
+ [[home][Link to Home]]
+ ORG
+
+ page.within('.wiki-form') do
+ find('#wiki_format option[value=org]').select_option
+ fill_in(:wiki_content, with: org_content)
+ click_button('Create page')
+ end
+
+ expect(page).to have_selector('h1', text: 'Heading')
+ expect(page).to have_selector('h2', text: 'Subheading')
+ expect(page).to have_link(href: wiki_page_path(wiki, 'home'))
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context "when wiki is not empty", :js do
+ before do
+ create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page')
+
+ visit wiki_path(wiki)
+ end
+
+ context "via the `new wiki page` page" do
+ it "creates a page with a single word" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_title, with: "foo")
+ fill_in(:wiki_content, with: "My awesome wiki!")
+ end
+
+ # Commit message field should have correct value.
+ expect(page).to have_field("wiki[message]", with: "Create foo")
+
+ click_button("Create page")
+
+ expect(page).to have_content("foo")
+ .and have_content("Last edited by #{user.name}")
+ .and have_content("My awesome wiki!")
+ end
+
+ it "creates a page with spaces in the name" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_title, with: "Spaces in the name")
+ fill_in(:wiki_content, with: "My awesome wiki!")
+ end
+
+ # Commit message field should have correct value.
+ expect(page).to have_field("wiki[message]", with: "Create Spaces in the name")
+
+ click_button("Create page")
+
+ expect(page).to have_content("Spaces in the name")
+ .and have_content("Last edited by #{user.name}")
+ .and have_content("My awesome wiki!")
+ end
+
+ it "creates a page with hyphens in the name" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_title, with: "hyphens-in-the-name")
+ fill_in(:wiki_content, with: "My awesome wiki!")
+ end
+
+ # Commit message field should have correct value.
+ expect(page).to have_field("wiki[message]", with: "Create hyphens in the name")
+
+ page.within(".wiki-form") do
+ fill_in(:wiki_content, with: "My awesome wiki!")
+
+ click_button("Create page")
+ end
+
+ expect(page).to have_content("hyphens in the name")
+ .and have_content("Last edited by #{user.name}")
+ .and have_content("My awesome wiki!")
+ end
+ end
+
+ it "shows the emoji autocompletion dropdown" do
+ click_link("New page")
+
+ page.within(".wiki-form") do
+ find("#wiki_content").native.send_keys("")
+
+ fill_in(:wiki_content, with: ":")
+ end
+
+ expect(page).to have_selector(".atwho-view")
+ end
+ end
+end
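
The wiki shared examples in this file (and the ones that follow) only require `wiki` and `user` to be defined by the caller, so they can be reused for both project and group wikis. A sketch of two possible consumers; the :group_wiki factory is an assumption (group wikis live in EE).

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Project > User creates wiki page', :js do
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
  let(:wiki) { project.wiki }

  it_behaves_like 'User creates wiki page'
end

RSpec.describe 'Group > User creates wiki page', :js do
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  # Assumed factory for a group wiki.
  let(:wiki) { create(:group_wiki, group: group) }

  before do
    group.add_maintainer(user)
  end

  it_behaves_like 'User creates wiki page'
end
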
diff --git a/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
new file mode 100644
index 00000000000..e1fd9c8dbec
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_deletes_wiki_page_shared_examples.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User deletes wiki page' do
+ include WikiHelpers
+
+ let(:wiki_page) { create(:wiki_page, wiki: wiki) }
+
+ before do
+ sign_in(user)
+ visit wiki_page_path(wiki, wiki_page)
+ end
+
+ it 'deletes a page', :js do
+ click_on('Edit')
+ click_on('Delete')
+ find('.modal-footer .btn-danger').click
+
+ expect(page).to have_content('Page was successfully deleted')
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
new file mode 100644
index 00000000000..cf6ae6cfa5b
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User previews wiki changes' do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki) }
+
+ before do
+ sign_in(user)
+ end
+
+ shared_examples 'relative links' do
+ let_it_be(:page_content) do
+ <<~HEREDOC
+ Some text so key event for [ does not trigger an incorrect replacement.
+ [regular link](regular)
+ [relative link 1](../relative)
+ [relative link 2](./relative)
+ [relative link 3](./e/f/relative)
+ [spaced link](title with spaces)
+ HEREDOC
+ end
+
+ def relative_path(path)
+ (Pathname.new(wiki.wiki_base_path) + File.dirname(wiki_page.path).tr(' ', '-') + path).to_s
+ end
+
+ shared_examples "rewrites relative links" do
+ specify do
+ expect(element).to have_link('regular link', href: wiki.wiki_base_path + '/regular')
+ expect(element).to have_link('spaced link', href: wiki.wiki_base_path + '/title%20with%20spaces')
+
+ expect(element).to have_link('relative link 1', href: relative_path('../relative'))
+ expect(element).to have_link('relative link 2', href: relative_path('./relative'))
+ expect(element).to have_link('relative link 3', href: relative_path('./e/f/relative'))
+ end
+ end
+
+ context "when there are no spaces or hyphens in the page name" do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a/b/c/d', content: page_content) }
+
+ it_behaves_like 'rewrites relative links'
+ end
+
+ context "when there are spaces in the page name" do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a page/b page/c page/d page', content: page_content) }
+
+ it_behaves_like 'rewrites relative links'
+ end
+
+ context "when there are hyphens in the page name" do
+ let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a-page/b-page/c-page/d-page', content: page_content) }
+
+ it_behaves_like 'rewrites relative links'
+ end
+ end
+
+ context "when rendering a new wiki page", :js do
+ before do
+ wiki_page.create # rubocop:disable Rails/SaveBang
+ visit wiki_page_path(wiki, wiki_page)
+ end
+
+ it_behaves_like 'relative links' do
+ let(:element) { page.find('.js-wiki-page-content') }
+ end
+ end
+
+ context "when previewing an existing wiki page", :js do
+ let(:preview) { page.find('.md-preview-holder') }
+
+ before do
+ wiki_page.create # rubocop:disable Rails/SaveBang
+ visit wiki_page_path(wiki, wiki_page, action: :edit)
+ end
+
+ it_behaves_like 'relative links' do
+ before do
+ click_on 'Preview'
+ end
+
+ let(:element) { preview }
+ end
+
+ it 'renders content with CommonMark' do
+ fill_in :wiki_content, with: "1. one\n - sublist\n"
+ click_on "Preview"
+
+ # the above generates two separate lists (not embedded) in CommonMark
+ expect(preview).to have_content("sublist")
+ expect(preview).not_to have_xpath("//ol//li//ul")
+ end
+
+ it "does not linkify double brackets inside code blocks as expected" do
+ fill_in :wiki_content, with: <<-HEREDOC
+ `[[do_not_linkify]]`
+ ```
+ [[also_do_not_linkify]]
+ ```
+ HEREDOC
+ click_on "Preview"
+
+ expect(preview).to have_content("do_not_linkify")
+ expect(preview).to have_content('[[do_not_linkify]]')
+ expect(preview).to have_content('[[also_do_not_linkify]]')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
new file mode 100644
index 00000000000..fee455207f1
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -0,0 +1,231 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User updates wiki page' do
+ include WikiHelpers
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when wiki is empty' do
+ before do |example|
+ visit(wiki_path(wiki))
+
+ wait_for_svg_to_be_loaded(example)
+
+ click_link "Create your first page"
+ end
+
+ it 'redirects back to the home edit page' do
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq wiki_path(wiki)
+ end
+
+ it 'updates a page that has a path', :js do
+ fill_in(:wiki_title, with: 'one/two/three-test')
+
+ page.within '.wiki-form' do
+ fill_in(:wiki_content, with: 'wiki content')
+ click_on('Create page')
+ end
+
+ expect(current_path).to include('one/two/three-test')
+ expect(find('.wiki-pages')).to have_content('three')
+
+ first(:link, text: 'three').click
+
+ expect(find('.nav-text')).to have_content('three')
+
+ click_on('Edit')
+
+ expect(current_path).to include('one/two/three-test')
+ expect(page).to have_content('Edit Page')
+
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context 'when wiki is not empty' do
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page') }
+
+ before do
+ visit(wiki_path(wiki))
+
+ click_link('Edit')
+ end
+
+ it 'updates a page', :js do
+ # Commit message field should have correct value.
+ expect(page).to have_field('wiki[message]', with: 'Update home')
+
+ fill_in(:wiki_content, with: 'My awesome wiki!')
+ click_button('Save changes')
+
+ expect(page).to have_content('Home')
+ expect(page).to have_content("Last edited by #{user.name}")
+ expect(page).to have_content('My awesome wiki!')
+ end
+
+ it 'updates the commit message as the title is changed', :js do
+ fill_in(:wiki_title, with: '& < > \ \ { } &')
+
+ expect(page).to have_field('wiki[message]', with: 'Update & < > \ \ { } &')
+ end
+
+ it 'correctly escapes the commit message entities', :js do
+ fill_in(:wiki_title, with: 'Wiki title')
+
+ expect(page).to have_field('wiki[message]', with: 'Update Wiki title')
+ end
+
+ it 'shows a validation error message' do
+ fill_in(:wiki_content, with: '')
+ click_button('Save changes')
+
+ expect(page).to have_selector('.wiki-form')
+ expect(page).to have_content('Edit Page')
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content("Content can't be blank")
+ expect(find('textarea#wiki_content').value).to eq('')
+ end
+
+ it 'shows the emoji autocompletion dropdown', :js do
+ find('#wiki_content').native.send_keys('')
+ fill_in(:wiki_content, with: ':')
+
+ expect(page).to have_selector('.atwho-view')
+ end
+
+ it 'shows an error message when the page has been changed by someone else' do
+ wiki_page.update(content: 'Update') # rubocop:disable Rails/SaveBang
+
+ click_button('Save changes')
+
+ expect(page).to have_content('Someone edited the page the same time you did.')
+ end
+
+ it 'updates a page' do
+ fill_in('Content', with: 'Updated Wiki Content')
+ click_on('Save changes')
+
+ expect(page).to have_content('Updated Wiki Content')
+ end
+
+ it 'cancels editing of a page' do
+ page.within(:css, '.wiki-form .form-actions') do
+ click_on('Cancel')
+ end
+
+ expect(current_path).to eq(wiki_page_path(wiki, wiki_page))
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context 'when the page is in a subdir' do
+ let(:page_name) { 'page_name' }
+ let(:page_dir) { "foo/bar/#{page_name}" }
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki, title: page_dir, content: 'Home page') }
+
+ before do
+ visit wiki_page_path(wiki, wiki_page, action: :edit)
+ end
+
+ it 'moves the page to the root folder' do
+ fill_in(:wiki_title, with: "/#{page_name}")
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, page_name))
+ end
+
+ it 'moves the page to other dir' do
+ new_page_dir = "foo1/bar1/#{page_name}"
+
+ fill_in(:wiki_title, with: new_page_dir)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
+ end
+
+ it 'remains in the same place if title has not changed' do
+ original_path = wiki_page_path(wiki, wiki_page)
+
+ fill_in(:wiki_title, with: page_name)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(original_path)
+ end
+
+ it 'can be moved to a different dir with a different name' do
+ new_page_dir = "foo1/bar1/new_page_name"
+
+ fill_in(:wiki_title, with: new_page_dir)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, new_page_dir))
+ end
+
+ it 'can be renamed and moved to the root folder' do
+ new_name = 'new_page_name'
+
+ fill_in(:wiki_title, with: "/#{new_name}")
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, new_name))
+ end
+
+ it 'squishes the title before creating the page' do
+ new_page_dir = " foo1 / bar1 / #{page_name} "
+
+ fill_in(:wiki_title, with: new_page_dir)
+
+ click_button('Save changes')
+
+ expect(current_path).to eq(wiki_page_path(wiki, "foo1/bar1/#{page_name}"))
+ end
+
+ it_behaves_like 'wiki file attachments'
+ end
+
+ context 'when an existing page exceeds the content size limit' do
+ let!(:wiki_page) { create(:wiki_page, wiki: wiki, content: "one\ntwo\nthree") }
+
+ before do
+ stub_application_setting(wiki_page_max_content_bytes: 10)
+
+ visit wiki_page_path(wiki_page.wiki, wiki_page, action: :edit)
+ end
+
+ it 'allows changing the title if the content does not change' do
+ fill_in 'Title', with: 'new title'
+ click_on 'Save changes'
+
+ expect(page).to have_content('Wiki was successfully updated.')
+ end
+
+ it 'shows a validation error when trying to change the content' do
+ fill_in 'Content', with: 'new content'
+ click_on 'Save changes'
+
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content('Content is too long (11 Bytes). The maximum size is 10 Bytes.')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
new file mode 100644
index 00000000000..0330b345a18
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_uses_wiki_shortcuts_shared_examples.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User uses wiki shortcuts' do
+ let(:wiki_page) { create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page') }
+
+ before do
+ sign_in(user)
+ visit wiki_page_path(wiki, wiki_page)
+ end
+
+ it 'visits the edit wiki page using the "e" keyboard shortcut', :js do
+ find('body').native.send_key('e')
+
+ expect(find('.wiki-page-title')).to have_content('Edit Page')
+ end
+end
diff --git a/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb b/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
index 5c45e34595f..3b2fda4e05b 100644
--- a/spec/features/projects/wiki/users_views_asciidoc_page_with_includes_spec.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_asciidoc_page_with_includes_shared_examples.rb
@@ -1,11 +1,7 @@
# frozen_string_literal: true
-require 'spec_helper'
-
-RSpec.describe 'User views AsciiDoc page with includes', :js do
- let_it_be(:user) { create(:user) }
+RSpec.shared_examples 'User views AsciiDoc page with includes' do
let_it_be(:wiki_content_selector) { '[data-qa-selector=wiki_page_content]' }
- let(:project) { create(:project, :public, :wiki_repo) }
let!(:included_wiki_page) { create_wiki_page('included_page', content: 'Content from the included page')}
let!(:wiki_page) { create_wiki_page('home', content: "Content from the main page.\ninclude::included_page.asciidoc[]") }
@@ -16,16 +12,16 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
format: :asciidoc
}
- create(:wiki_page, wiki: project.wiki, **attrs)
+ create(:wiki_page, wiki: wiki, **attrs)
end
before do
sign_in(user)
end
- context 'when the file being included exists' do
+ context 'when the file being included exists', :js do
it 'includes the file contents' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Content from the main page. Content from the included page')
@@ -34,8 +30,10 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
context 'when there are multiple versions of the wiki pages' do
before do
+ # rubocop:disable Rails/SaveBang
included_wiki_page.update(message: 'updated included file', content: 'Updated content from the included page')
wiki_page.update(message: 'updated wiki page', content: "Updated content from the main page.\ninclude::included_page.asciidoc[]")
+ # rubocop:enable Rails/SaveBang
end
let(:latest_version_id) { wiki_page.versions.first.id }
@@ -43,7 +41,7 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
context 'viewing the latest version' do
it 'includes the latest content' do
- visit(project_wiki_path(project, wiki_page, version_id: latest_version_id))
+ visit(wiki_page_path(wiki, wiki_page, version_id: latest_version_id))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Updated content from the main page. Updated content from the included page')
@@ -53,7 +51,7 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
context 'viewing the original version' do
it 'includes the content from the original version' do
- visit(project_wiki_path(project, wiki_page, version_id: oldest_version_id))
+ visit(wiki_page_path(wiki, wiki_page, version_id: oldest_version_id))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Content from the main page. Content from the included page')
@@ -63,13 +61,13 @@ RSpec.describe 'User views AsciiDoc page with includes', :js do
end
end
- context 'when the file being included does not exist' do
+ context 'when the file being included does not exist', :js do
before do
included_wiki_page.delete
end
it 'outputs an error' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
page.within(:css, wiki_content_selector) do
expect(page).to have_content('Content from the main page. [ERROR: include::included_page.asciidoc[] - unresolved directive]')
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
new file mode 100644
index 00000000000..d7f5b485a82
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+
+RSpec.shared_examples 'User views empty wiki' do
+ let(:element) { page.find('.row.empty-state') }
+ let(:container_name) { wiki.container.class.name.humanize(capitalize: false) }
+ let(:confluence_link) { 'Enable the Confluence Wiki integration' }
+
+ shared_examples 'wiki is not found' do
+ it 'shows an error message' do
+ visit wiki_path(wiki)
+
+ if @current_user
+ expect(page).to have_content('Page Not Found')
+ else
+ expect(page).to have_content('You need to sign in')
+ end
+ end
+ end
+
+ shared_examples 'empty wiki message' do |writable: false, issuable: false, confluence: false|
+ # This mirrors the logic in:
+ # - app/views/shared/empty_states/_wikis.html.haml
+ # - WikiHelper#wiki_empty_state_messages
+ it 'shows the empty state message with the expected elements' do
+ visit wiki_path(wiki)
+
+ if writable
+ expect(element).to have_content("The wiki lets you write documentation for your #{container_name}")
+ else
+ expect(element).to have_content("This #{container_name} has no wiki pages")
+ expect(element).to have_content("You must be a #{container_name} member")
+ end
+
+ if issuable && !writable
+ expect(element).to have_content("improve the wiki for this #{container_name}")
+ expect(element).to have_link("issue tracker", href: project_issues_path(project))
+ expect(element).to have_link("Suggest wiki improvement", href: new_project_issue_path(project))
+ else
+ expect(element).not_to have_content("improve the wiki for this #{container_name}")
+ expect(element).not_to have_link("issue tracker")
+ expect(element).not_to have_link("Suggest wiki improvement")
+ end
+
+ if confluence
+ expect(element).to have_link(confluence_link)
+ else
+ expect(element).not_to have_link(confluence_link)
+ end
+
+ if writable
+ element.click_link 'Create your first page'
+
+ expect(page).to have_button('Create page')
+ else
+ expect(element).not_to have_link('Create your first page')
+ end
+ end
+ end
+end
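
A sketch showing how the keyword flags on the nested 'empty wiki message' example could be exercised; the role setup and flag combinations are illustrative only.

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'Project > User views empty wiki' do
  let_it_be(:user) { create(:user) }
  let(:project) { create(:project, :wiki_repo) }
  let(:wiki) { project.wiki }

  it_behaves_like 'User views empty wiki' do
    context 'when the user is a maintainer' do
      before do
        project.add_maintainer(user)
        sign_in(user)
      end

      it_behaves_like 'empty wiki message', writable: true
    end

    context 'when the user can create issues but not wiki pages' do
      before do
        project.add_guest(user)
        sign_in(user)
      end

      it_behaves_like 'empty wiki message', issuable: true
    end
  end
end
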
diff --git a/spec/features/projects/wiki/user_views_wiki_page_spec.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
index e93689af0aa..40998ebde4c 100644
--- a/spec/features/projects/wiki/user_views_wiki_page_spec.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_page_shared_examples.rb
@@ -1,14 +1,13 @@
# frozen_string_literal: true
-require 'spec_helper'
+# Requires a context containing:
+# wiki
+# user
-RSpec.describe 'User views a wiki page' do
+RSpec.shared_examples 'User views a wiki page' do
include WikiHelpers
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
let(:path) { 'image.png' }
- let(:wiki) { project.wiki }
let(:wiki_page) do
create(:wiki_page,
wiki: wiki,
@@ -16,13 +15,12 @@ RSpec.describe 'User views a wiki page' do
end
before do
- project.add_maintainer(user)
sign_in(user)
end
context 'when wiki is empty', :js do
before do
- visit project_wikis_path(project)
+ visit wiki_path(wiki)
wait_for_svg_to_be_loaded
@@ -83,7 +81,7 @@ RSpec.describe 'User views a wiki page' do
context 'when a page does not have history' do
before do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
end
it 'shows all the pages' do
@@ -92,7 +90,7 @@ RSpec.describe 'User views a wiki page' do
end
context 'shows a file stored in a page' do
- let(:path) { upload_file_to_wiki(project, user, 'dk.png') }
+ let(:path) { upload_file_to_wiki(wiki, user, 'dk.png') }
it do
expect(page).to have_xpath("//img[@data-src='#{wiki.wiki_base_path}/#{path}']")
@@ -121,7 +119,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'shows the page history' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
expect(page).to have_selector('a.btn', text: 'Edit')
@@ -133,12 +131,16 @@ RSpec.describe 'User views a wiki page' do
end
it 'does not show the "Edit" button' do
- visit(project_wiki_path(project, wiki_page, version_id: wiki_page.versions.last.id))
+ visit(wiki_page_path(wiki, wiki_page, version_id: wiki_page.versions.last.id))
expect(page).not_to have_selector('a.btn', text: 'Edit')
end
context 'show the diff' do
+ before do
+ skip('Diffing for group wikis will be implemented in https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42610') if wiki.container.is_a?(Group)
+ end
+
def expect_diff_links(commit)
diff_path = wiki_page_path(wiki, wiki_page, version_id: commit, action: :diff)
@@ -150,7 +152,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'links to the correct diffs' do
- visit project_wiki_history_path(project, wiki_page)
+ visit wiki_page_path(wiki, wiki_page, action: :history)
commit1 = wiki.commit('HEAD^')
commit2 = wiki.commit
@@ -208,7 +210,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'preserves the special characters' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_css('.wiki-pages li', text: title)
@@ -223,7 +225,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'safely displays the page' do
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_content('foo bar')
@@ -236,7 +238,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'safely displays the message' do
- visit(project_wiki_history_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page, action: :history))
expect(page).to have_content('<script>alert(true)<script>')
end
@@ -248,7 +250,7 @@ RSpec.describe 'User views a wiki page' do
before do
allow(Gitlab::EncodingHelper).to receive(:encode!).and_return(content)
- visit(project_wiki_path(project, wiki_page))
+ visit(wiki_page_path(wiki, wiki_page))
end
it 'does not show "Edit" button' do
@@ -263,7 +265,7 @@ RSpec.describe 'User views a wiki page' do
end
it 'opens a default wiki page', :js do
- visit project_path(project)
+ visit wiki.container.web_url
find('.shortcuts-wiki').click
diff --git a/spec/features/projects/wiki/user_views_wiki_pages_spec.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
index 4f29ae0cc8a..314c2074eee 100644
--- a/spec/features/projects/wiki/user_views_wiki_pages_spec.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_pages_shared_examples.rb
@@ -1,23 +1,22 @@
# frozen_string_literal: true
-require 'spec_helper'
+# Requires a context containing:
+# wiki
+# user
-RSpec.describe 'User views wiki pages' do
+RSpec.shared_examples 'User views wiki pages' do
include WikiHelpers
- let(:user) { create(:user) }
- let(:project) { create(:project, :wiki_repo, namespace: user.namespace) }
-
let!(:wiki_page1) do
- create(:wiki_page, wiki: project.wiki, title: '3 home', content: '3')
+ create(:wiki_page, wiki: wiki, title: '3 home', content: '3')
end
let!(:wiki_page2) do
- create(:wiki_page, wiki: project.wiki, title: '1 home', content: '1')
+ create(:wiki_page, wiki: wiki, title: '1 home', content: '1')
end
let!(:wiki_page3) do
- create(:wiki_page, wiki: project.wiki, title: '2 home', content: '2')
+ create(:wiki_page, wiki: wiki, title: '2 home', content: '2')
end
let(:pages) do
@@ -25,9 +24,8 @@ RSpec.describe 'User views wiki pages' do
end
before do
- project.add_maintainer(user)
sign_in(user)
- visit(project_wikis_pages_path(project))
+ visit(wiki_path(wiki, action: :pages))
end
context 'ordered by title' do
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
new file mode 100644
index 00000000000..a7ba7a8ad07
--- /dev/null
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_sidebar_shared_examples.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+# Requires a context containing:
+# wiki
+# user
+
+RSpec.shared_examples 'User views wiki sidebar' do
+ include WikiHelpers
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when there are some existing pages' do
+ before do
+ create(:wiki_page, wiki: wiki, title: 'home', content: 'home')
+ create(:wiki_page, wiki: wiki, title: 'another', content: 'another')
+ end
+
+ it 'renders a default sidebar when there is no customized sidebar' do
+ visit wiki_path(wiki)
+
+ expect(page).to have_content('another')
+ expect(page).not_to have_link('View All Pages')
+ end
+
+ context 'when there is a customized sidebar' do
+ before do
+ create(:wiki_page, wiki: wiki, title: '_sidebar', content: 'My customized sidebar')
+ end
+
+ it 'renders my customized sidebar instead of the default one' do
+ visit wiki_path(wiki)
+
+ expect(page).to have_content('My customized sidebar')
+ expect(page).not_to have_content('Another')
+ end
+ end
+ end
+
+ context 'when there are 15 existing pages' do
+ before do
+ (1..5).each { |i| create(:wiki_page, wiki: wiki, title: "my page #{i}") }
+ (6..10).each { |i| create(:wiki_page, wiki: wiki, title: "parent/my page #{i}") }
+ (11..15).each { |i| create(:wiki_page, wiki: wiki, title: "grandparent/parent/my page #{i}") }
+ end
+
+ it 'shows all pages in the sidebar' do
+ visit wiki_path(wiki)
+
+ (1..15).each { |i| expect(page).to have_content("my page #{i}") }
+ expect(page).not_to have_link('View All Pages')
+ end
+
+ context 'when there are more than 15 existing pages' do
+ before do
+ create(:wiki_page, wiki: wiki, title: 'my page 16')
+ end
+
+ it 'shows the first 15 pages in the sidebar' do
+ visit wiki_path(wiki)
+
+ expect(page).to have_text('my page', count: 15)
+ expect(page).to have_link('View All Pages')
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
index e93077c42e1..7707e79386c 100644
--- a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
@@ -1,12 +1,13 @@
# frozen_string_literal: true
-RSpec.shared_examples 'resource mentions migration' do |migration_class, resource_class|
+RSpec.shared_examples 'resource mentions migration' do |migration_class, resource_class_name|
it 'migrates resource mentions' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
+ resource_class = "#{Gitlab::BackgroundMigration::UserMentions::Models}::#{resource_class_name}".constantize
expect do
- subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
end.to change { user_mentions.count }.by(1)
user_mention = user_mentions.last
@@ -16,23 +17,23 @@ RSpec.shared_examples 'resource mentions migration' do |migration_class, resourc
# check that performing the same job twice does not fail and does not change counts
expect do
- subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
-RSpec.shared_examples 'resource notes mentions migration' do |migration_class, resource_class|
+RSpec.shared_examples 'resource notes mentions migration' do |migration_class, resource_class_name|
it 'migrates mentions from note' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
# there are 5 notes for each noteable_type, but two do not have mentions and
# another one's noteable_id points to a nonexistent resource
- expect(notes.where(noteable_type: resource_class.to_s).count).to eq 5
+ expect(notes.where(noteable_type: resource_class_name).count).to eq 5
expect(user_mentions.count).to eq 0
expect do
- subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
end.to change { user_mentions.count }.by(2)
# check that the user_mention for regular note is created
@@ -51,7 +52,7 @@ RSpec.shared_examples 'resource notes mentions migration' do |migration_class, r
# check that performing the same job twice does not fail and does not change counts
expect do
- subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
@@ -83,24 +84,25 @@ RSpec.shared_examples 'schedules resource mentions migration' do |resource_class
end
end
-RSpec.shared_examples 'resource migration not run' do |migration_class, resource_class|
+RSpec.shared_examples 'resource migration not run' do |migration_class, resource_class_name|
it 'does not migrate mentions' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
+ resource_class = "#{Gitlab::BackgroundMigration::UserMentions::Models}::#{resource_class_name}".constantize
expect do
- subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
-RSpec.shared_examples 'resource notes migration not run' do |migration_class, resource_class|
+RSpec.shared_examples 'resource notes migration not run' do |migration_class, resource_class_name|
it 'does not migrate mentions' do
join = migration_class::JOIN
conditions = migration_class::QUERY_CONDITIONS
expect do
- subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ subject.perform(resource_class_name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
end.to change { user_mentions.count }.by(0)
end
end
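
With the change above, callers pass the model name as a string (resolved under Gitlab::BackgroundMigration::UserMentions::Models) instead of the class itself. A hypothetical caller; the described class, schema version, and MigrateIssueMentionsToDb constant are placeholders, and the `subject`, `user_mentions`, and `notes` definitions the examples rely on are left out of this sketch.

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20200101000000 do
  # ... `subject`, `user_mentions`, and `notes` would be defined here ...

  include_examples 'resource mentions migration', MigrateIssueMentionsToDb, 'Issue'
end
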
diff --git a/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
index e43ce936b90..469c0c287b1 100644
--- a/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/diff_file_collections_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
RSpec.shared_examples 'diff statistics' do |test_include_stats_flag: true|
- subject { described_class.new(diffable, collection_default_args) }
+ subject { described_class.new(diffable, **collection_default_args) }
def stub_stats_find_by_path(path, stats_mock)
expect_next_instance_of(Gitlab::Git::DiffStatsCollection) do |collection|
diff --git a/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
new file mode 100644
index 00000000000..33061f17bde
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/import_export/relation_factory_shared_examples.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+# required context:
+# - importable: group or project
+# - relation_hash: a note relation that's being imported
+# - created_object: the object created with the relation factory
+RSpec.shared_examples 'Notes user references' do
+ let(:relation_sym) { :notes }
+ let(:mapped_user) { create(:user) }
+ let(:exported_member) do
+ {
+ 'id' => 111,
+ 'access_level' => 30,
+ 'source_id' => 1,
+ 'source_type' => importable.class.name == 'Project' ? 'Project' : 'Namespace',
+ 'user_id' => 3,
+ 'notification_level' => 3,
+ 'created_at' => '2016-11-18T09:29:42.634Z',
+ 'updated_at' => '2016-11-18T09:29:42.634Z',
+ 'user' => {
+ 'id' => 999,
+ 'email' => mapped_user.email,
+ 'username' => mapped_user.username
+ }
+ }
+ end
+
+ let(:members_mapper) do
+ Gitlab::ImportExport::MembersMapper.new(
+ exported_members: [exported_member].compact,
+ user: importer_user,
+ importable: importable
+ )
+ end
+
+ shared_examples 'sets the note author to the importer user' do
+ it { expect(created_object.author).to eq(importer_user) }
+ end
+
+ shared_examples 'sets the note author to the mapped user' do
+ it { expect(created_object.author).to eq(mapped_user) }
+ end
+
+ shared_examples 'does not add original author note' do
+ it { expect(created_object.note).not_to include('*By Administrator') }
+ end
+
+ shared_examples 'adds original author note' do
+ it { expect(created_object.note).to include('*By Administrator') }
+ end
+
+ context 'when the importer is admin' do
+ let(:importer_user) { create(:admin) }
+
+ context 'and the note author is not mapped' do
+ let(:exported_member) { nil }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+
+ context 'and the note author is the importer user' do
+ let(:mapped_user) { importer_user }
+
+ include_examples 'sets the note author to the mapped user'
+
+ include_examples 'does not add original author note'
+ end
+
+ context 'and the note author exists in the target instance' do
+ let(:mapped_user) { create(:user) }
+
+ include_examples 'sets the note author to the mapped user'
+
+ include_examples 'does not add original author note'
+ end
+ end
+
+ context 'when the importer is not admin' do
+ let(:importer_user) { create(:user) }
+
+ context 'and the note author is not mapped' do
+ let(:exported_member) { nil }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+
+ context 'and the note author is the importer user' do
+ let(:mapped_user) { importer_user }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+
+ context 'and the note author exists in the target instance' do
+ let(:mapped_user) { create(:user) }
+
+ include_examples 'sets the note author to the importer user'
+
+ include_examples 'adds original author note'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb
new file mode 100644
index 00000000000..6c8ab38413d
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_confidential_filter_shared_examples.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'search results filtered by confidential' do
+ context 'filter not provided (all behavior)' do
+ let(:filters) { {} }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+
+ context 'confidential filter' do
+ let(:filters) { { confidential: 'yes' } }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns only confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).not_to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+
+ context 'not confidential filter' do
+ let(:filters) { { confidential: 'no' } }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).not_to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+
+ context 'unsupported filter' do
+ let(:filters) { { confidential: 'goodbye' } }
+
+ context 'when Feature search_filter_by_confidential enabled' do
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+
+ context 'when Feature search_filter_by_confidential not enabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'returns confidential and not confidential results', :aggregate_failures do
+ expect(results.objects('issues')).to include confidential_result
+ expect(results.objects('issues')).to include opened_result
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
new file mode 100644
index 00000000000..765279a78fe
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/search_results_sorted_shared_examples.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'search results sorted' do
+ context 'sort: newest' do
+ let(:sort) { 'newest' }
+
+ it 'sorts results by created_at' do
+ expect(results.objects(scope).map(&:id)).to eq([new_result.id, old_result.id, very_old_result.id])
+ end
+ end
+
+ context 'sort: oldest' do
+ let(:sort) { 'oldest' }
+
+ it 'sorts results by created_at' do
+ expect(results.objects(scope).map(&:id)).to eq([very_old_result.id, old_result.id, new_result.id])
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/search_issue_state_filter_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/search_state_filter_shared_examples.rb
index e80ec516407..e80ec516407 100644
--- a/spec/support/shared_examples/lib/gitlab/search_issue_state_filter_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/search_state_filter_shared_examples.rb
diff --git a/spec/support/shared_examples/models/mentionable_shared_examples.rb b/spec/support/shared_examples/models/mentionable_shared_examples.rb
index 94c52bdaaa6..0ee0b7e6d88 100644
--- a/spec/support/shared_examples/models/mentionable_shared_examples.rb
+++ b/spec/support/shared_examples/models/mentionable_shared_examples.rb
@@ -207,29 +207,8 @@ RSpec.shared_examples 'an editable mentionable' do
end
RSpec.shared_examples 'mentions in description' do |mentionable_type|
- describe 'when store_mentioned_users_to_db feature disabled' do
+ describe 'when storing user mentions' do
before do
- stub_feature_flags(store_mentioned_users_to_db: false)
- mentionable.store_mentions!
- end
-
- context 'when mentionable description contains mentions' do
- let(:user) { create(:user) }
- let(:mentionable) { create(mentionable_type, description: "#{user.to_reference} some description") }
-
- it 'stores no mentions' do
- expect(mentionable.user_mentions.count).to eq 0
- end
-
- it 'renders description_html correctly' do
- expect(mentionable.description_html).to include("<a href=\"/#{user.username}\" data-user=\"#{user.id}\"")
- end
- end
- end
-
- describe 'when store_mentioned_users_to_db feature enabled' do
- before do
- stub_feature_flags(store_mentioned_users_to_db: true)
mentionable.store_mentions!
end
diff --git a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
index d1437244082..7cf014e8023 100644
--- a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
@@ -31,6 +31,41 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
end
+ def as_item(item)
+ item # Override to perform a transformation, if necessary
+ end
+
+ def as_items(items)
+ items.map { |item| as_item(item) }
+ end
+
+ describe '#scoped_items' do
+ it 'includes all items with the same scope' do
+ scope = as_items([item1, item2, new_item, create_item])
+ irrelevant = create(factory, {}) # This should not share the scope
+ context = RelativePositioning.mover.context(item1)
+
+ same_scope = as_items(context.scoped_items)
+
+ expect(same_scope).to include(*scope)
+ expect(same_scope).not_to include(as_item(irrelevant))
+ end
+ end
+
+ describe '#relative_siblings' do
+ it 'includes all items with the same scope, except self' do
+ scope = as_items([item2, new_item, create_item])
+ irrelevant = create(factory, {}) # This should not share the scope
+ context = RelativePositioning.mover.context(item1)
+
+ siblings = as_items(context.relative_siblings)
+
+ expect(siblings).to include(*scope)
+ expect(siblings).not_to include(as_item(item1))
+ expect(siblings).not_to include(as_item(irrelevant))
+ end
+ end
+
describe '.move_nulls_to_end' do
let(:item3) { create_item }
let(:sibling_query) { item1.class.relative_positioning_query_base(item1) }
@@ -47,7 +82,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(item1.relative_position).to be(1000)
expect(sibling_query.where(relative_position: nil)).not_to exist
- expect(sibling_query.reorder(:relative_position, :id)).to eq([item1, item2, item3])
+ expect(as_items(sibling_query.reorder(:relative_position, :id))).to eq(as_items([item1, item2, item3]))
end
it 'preserves relative position' do
@@ -120,16 +155,16 @@ RSpec.shared_examples 'a class that supports relative positioning' do
it 'does not have an N+1 issue' do
create_items_with_positions(10..12)
- a, b, c, d, e, f = create_items_with_positions([nil, nil, nil, nil, nil, nil])
+ a, b, c, d, e, f, *xs = create_items_with_positions([nil] * 10)
baseline = ActiveRecord::QueryRecorder.new do
- described_class.move_nulls_to_end([a, e])
+ described_class.move_nulls_to_end([a, b])
end
- expect { described_class.move_nulls_to_end([b, c, d]) }
+ expect { described_class.move_nulls_to_end([c, d, e, f]) }
.not_to exceed_query_limit(baseline)
- expect { described_class.move_nulls_to_end([f]) }
+ expect { described_class.move_nulls_to_end(xs) }
.not_to exceed_query_limit(baseline.count)
end
end
@@ -149,7 +184,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(items.sort_by(&:relative_position)).to eq(items)
expect(sibling_query.where(relative_position: nil)).not_to exist
- expect(sibling_query.reorder(:relative_position, :id)).to eq(items)
+ expect(as_items(sibling_query.reorder(:relative_position, :id))).to eq(as_items(items))
expect(item3.relative_position).to be(1000)
end
@@ -652,3 +687,119 @@ RSpec.shared_examples 'a class that supports relative positioning' do
(RelativePositioning::MIN_POSITION..).take(size)
end
end
+
+RSpec.shared_examples 'no-op relative positioning' do
+ def create_item(**params)
+ create(factory, params.merge(default_params))
+ end
+
+ let_it_be(:item1) { create_item }
+ let_it_be(:item2) { create_item }
+ let_it_be(:new_item) { create_item(relative_position: nil) }
+
+ def any_relative_positions
+ new_item.class.reorder(:relative_position, :id).pluck(:id, :relative_position)
+ end
+
+ shared_examples 'a no-op method' do
+ it 'does not raise errors' do
+ expect { perform }.not_to raise_error
+ end
+
+ it 'does not perform any DB queries' do
+ expect { perform }.not_to exceed_query_limit(0)
+ end
+
+ it 'does not change any relative_position' do
+ expect { perform }.not_to change { any_relative_positions }
+ end
+ end
+
+ describe '.scoped_items' do
+ subject { RelativePositioning.mover.context(item1).scoped_items }
+
+ it 'is empty' do
+ expect(subject).to be_empty
+ end
+ end
+
+ describe '.relative_siblings' do
+ subject { RelativePositioning.mover.context(item1).relative_siblings }
+
+ it 'is empty' do
+ expect(subject).to be_empty
+ end
+ end
+
+ describe '.move_nulls_to_end' do
+ subject { item1.class.move_nulls_to_end([new_item, item1]) }
+
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject
+ end
+ end
+
+ it 'does not move any items' do
+ expect(subject).to eq(0)
+ end
+ end
+
+ describe '.move_nulls_to_start' do
+ subject { item1.class.move_nulls_to_start([new_item, item1]) }
+
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject
+ end
+ end
+
+ it 'does not move any items' do
+ expect(subject).to eq(0)
+ end
+ end
+
+ describe 'instance methods' do
+ subject { new_item }
+
+ describe '#move_to_start' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_to_start
+ end
+ end
+ end
+
+ describe '#move_to_end' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_to_end
+ end
+ end
+ end
+
+ describe '#move_between' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_between(item1, item2)
+ end
+ end
+ end
+
+ describe '#move_before' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_before(item1)
+ end
+ end
+ end
+
+ describe '#move_after' do
+ it_behaves_like 'a no-op method' do
+ def perform
+ subject.move_after(item1)
+ end
+ end
+ end
+ end
+end
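
A sketch of the context the new 'no-op relative positioning' examples expect, namely a `factory` and `default_params`; :unpositioned_record is a hypothetical factory for a model whose records fall outside any positioning scope.

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe 'no-op relative positioning consumer' do
  it_behaves_like 'no-op relative positioning' do
    # Hypothetical factory: records created with these params are expected
    # to have no positioning scope, so every move operation should no-op.
    def factory
      :unpositioned_record
    end

    def default_params
      {}
    end
  end
end
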
diff --git a/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb b/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
index 07552b62cdd..5198508d48b 100644
--- a/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
+++ b/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
@@ -73,3 +73,13 @@ RSpec.shared_examples 'timebox resource event actions' do
end
end
end
+
+RSpec.shared_examples 'timebox resource tracks issue metrics' do |type|
+ describe '#usage_metrics' do
+ it 'tracks usage' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:"track_issue_#{type}_changed_action")
+
+ create(described_class.name.underscore.to_sym, issue: create(:issue))
+ end
+ end
+end
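
A sketch of the new parameterised example in use; the `type` argument selects which IssueActivityUniqueCounter tracking method is expected (for example, :milestone maps to track_issue_milestone_changed_action). The ResourceIterationEvent block assumes an EE-only model and is shown only to illustrate reuse.

# frozen_string_literal: true

require 'spec_helper'

RSpec.describe ResourceMilestoneEvent, type: :model do
  it_behaves_like 'timebox resource tracks issue metrics', :milestone
end

# Assumed EE model, included only to show a second timebox type.
RSpec.describe ResourceIterationEvent, type: :model do
  it_behaves_like 'timebox resource tracks issue metrics', :iteration
end
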
diff --git a/spec/support/shared_examples/models/throttled_touch_shared_examples.rb b/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
index 14b851d2828..e869cbce6ae 100644
--- a/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
+++ b/spec/support/shared_examples/models/throttled_touch_shared_examples.rb
@@ -13,8 +13,8 @@ RSpec.shared_examples 'throttled touch' do
first_updated_at = Time.zone.now - (ThrottledTouch::TOUCH_INTERVAL * 2)
second_updated_at = Time.zone.now - (ThrottledTouch::TOUCH_INTERVAL * 1.5)
- Timecop.freeze(first_updated_at) { subject.touch }
- Timecop.freeze(second_updated_at) { subject.touch }
+ travel_to(first_updated_at) { subject.touch }
+ travel_to(second_updated_at) { subject.touch }
expect(subject.updated_at).to be_like_time(first_updated_at)
end
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index 557025569b8..7b591ad84d1 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'UpdateProjectStatistics' do
+RSpec.shared_examples 'UpdateProjectStatistics' do |with_counter_attribute|
let(:project) { subject.project }
let(:project_statistics_name) { described_class.project_statistics_name }
let(:statistic_attribute) { described_class.statistic_attribute }
@@ -13,108 +13,230 @@ RSpec.shared_examples 'UpdateProjectStatistics' do
subject.read_attribute(statistic_attribute).to_i
end
- it { is_expected.to be_new_record }
+ def read_pending_increment
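+ # reads the pending increment that CounterAttribute buffers in Redis shared state before it is flushed to the database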
+ Gitlab::Redis::SharedState.with do |redis|
+ key = project.statistics.counter_key(project_statistics_name)
+ redis.get(key).to_i
+ end
+ end
- context 'when creating' do
- it 'updates the project statistics' do
- delta0 = reload_stat
+ it { is_expected.to be_new_record }
- subject.save!
+ context 'when feature flag efficient_counter_attribute is disabled' do
+ before do
+ stub_feature_flags(efficient_counter_attribute: false)
+ end
- delta1 = reload_stat
+ context 'when creating' do
+ it 'updates the project statistics' do
+ delta0 = reload_stat
- expect(delta1).to eq(delta0 + read_attribute)
- expect(delta1).to be > delta0
- end
+ subject.save!
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
+ delta1 = reload_stat
- subject.save!
- end
- end
+ expect(delta1).to eq(delta0 + read_attribute)
+ expect(delta1).to be > delta0
+ end
- context 'when updating' do
- let(:delta) { 42 }
+ it 'schedules a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async).once
- before do
- subject.save!
+ subject.save!
+ end
end
- it 'updates project statistics' do
- expect(ProjectStatistics)
- .to receive(:increment_statistic)
- .and_call_original
+ context 'when updating' do
+ let(:delta) { 42 }
- subject.write_attribute(statistic_attribute, read_attribute + delta)
+ before do
+ subject.save!
+ end
- expect { subject.save! }
- .to change { reload_stat }
- .by(delta)
- end
+ it 'updates project statistics' do
+ expect(ProjectStatistics)
+ .to receive(:increment_statistic)
+ .and_call_original
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
- subject.write_attribute(statistic_attribute, read_attribute + delta)
- subject.save!
- end
+ expect { subject.save! }
+ .to change { reload_stat }
+ .by(delta)
+ end
- it 'avoids N + 1 queries' do
- subject.write_attribute(statistic_attribute, read_attribute + delta)
+ it 'schedules a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async).once
- control_count = ActiveRecord::QueryRecorder.new do
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
subject.save!
end
- subject.write_attribute(statistic_attribute, read_attribute + delta)
+ it 'avoids N + 1 queries' do
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
- expect do
- subject.save!
- end.not_to exceed_query_limit(control_count)
- end
- end
+ control_count = ActiveRecord::QueryRecorder.new do
+ subject.save!
+ end
- context 'when destroying' do
- before do
- subject.save!
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ expect do
+ subject.save!
+ end.not_to exceed_query_limit(control_count)
+ end
end
- it 'updates the project statistics' do
- delta0 = reload_stat
+ context 'when destroying' do
+ before do
+ subject.save!
+ end
- subject.destroy!
+ it 'updates the project statistics' do
+ delta0 = reload_stat
- delta1 = reload_stat
+ subject.destroy!
- expect(delta1).to eq(delta0 - read_attribute)
- expect(delta1).to be < delta0
- end
+ delta1 = reload_stat
+
+ expect(delta1).to eq(delta0 - read_attribute)
+ expect(delta1).to be < delta0
+ end
+
+ it 'schedules a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .to receive(:perform_async).once
- it 'schedules a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .to receive(:perform_async).once
+ subject.destroy!
+ end
+
+ context 'when it is destroyed from the project level' do
+ it 'does not update the project statistics' do
+ expect(ProjectStatistics)
+ .not_to receive(:increment_statistic)
+
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+
+ it 'does not schedule a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
- subject.destroy!
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+ end
end
+ end
- context 'when it is destroyed from the project level' do
- it 'does not update the project statistics' do
- expect(ProjectStatistics)
- .not_to receive(:increment_statistic)
+ def expect_flush_counter_increments_worker_performed
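+ # a counter flush is scheduled for both the tracked statistic and the aggregated storage_size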
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, project.statistics.class.name, project.statistics.id, project_statistics_name)
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, project.statistics.class.name, project.statistics.id, :storage_size)
- project.update!(pending_delete: true)
- project.destroy!
+ yield
+
+ # simulate the worker running immediately
+ expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ FlushCounterIncrementsWorker.new.perform(project.statistics.class.name, project.statistics.id, project_statistics_name)
+ end
+
+ if with_counter_attribute
+ context 'when statistic is a counter attribute', :clean_gitlab_redis_shared_state do
+ context 'when creating' do
+ it 'stores pending increments for async update' do
+ initial_stat = reload_stat
+ expected_increment = read_attribute
+
+ expect_flush_counter_increments_worker_performed do
+ subject.save!
+
+ expect(read_pending_increment).to eq(expected_increment)
+ expect(expected_increment).to be > initial_stat
+ expect(expected_increment).to be_positive
+ end
+ end
end
- it 'does not schedule a namespace statistics worker' do
- expect(Namespaces::ScheduleAggregationWorker)
- .not_to receive(:perform_async)
+ context 'when updating' do
+ let(:delta) { 42 }
+
+ before do
+ subject.save!
+ redis_shared_state_cleanup!
+ end
+
+ it 'stores pending increments for async update' do
+ expect(ProjectStatistics)
+ .to receive(:increment_statistic)
+ .and_call_original
+
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ expect_flush_counter_increments_worker_performed do
+ subject.save!
+
+ expect(read_pending_increment).to eq(delta)
+ end
+ end
+
+ it 'avoids N + 1 queries' do
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ subject.save!
+ end
+
+ subject.write_attribute(statistic_attribute, read_attribute + delta)
+
+ expect do
+ subject.save!
+ end.not_to exceed_query_limit(control_count)
+ end
+ end
- project.update!(pending_delete: true)
- project.destroy!
+ context 'when destroying' do
+ before do
+ subject.save!
+ redis_shared_state_cleanup!
+ end
+
+ it 'stores pending increment for async update' do
+ initial_stat = reload_stat
+ expected_increment = -read_attribute
+
+ expect_flush_counter_increments_worker_performed do
+ subject.destroy!
+
+ expect(read_pending_increment).to eq(expected_increment)
+ expect(expected_increment).to be < initial_stat
+ expect(expected_increment).to be_negative
+ end
+ end
+
+ context 'when it is destroyed from the project level' do
+ it 'does not update the project statistics' do
+ expect(ProjectStatistics)
+ .not_to receive(:increment_statistic)
+
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+
+ it 'does not schedule a namespace statistics worker' do
+ expect(Namespaces::ScheduleAggregationWorker)
+ .not_to receive(:perform_async)
+
+ project.update!(pending_delete: true)
+ project.destroy!
+ end
+ end
end
end
end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index b87f7fe97e1..336a734a25f 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -164,7 +164,7 @@ RSpec.shared_examples 'wiki model' do
def total_pages(entries)
entries.sum do |entry|
- entry.is_a?(WikiDirectory) ? entry.pages.size : 1
+ entry.is_a?(WikiDirectory) ? total_pages(entry.entries) : 1
end
end
diff --git a/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb b/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
new file mode 100644
index 00000000000..7710e756e5b
--- /dev/null
+++ b/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'Self-managed Core resource access tokens' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(false)
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:admin_resource_access_tokens) }
+ end
+
+ context 'with developer' do
+ let(:current_user) { developer }
+
+ it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ end
+end
+
+RSpec.shared_examples 'GitLab.com Core resource access tokens' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(true)
+ stub_ee_application_setting(should_check_namespace_plan: true)
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
index 5c122b4b5d6..4b5299cebec 100644
--- a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
@@ -75,7 +75,7 @@ RSpec.shared_examples 'Composer package creation' do |user_type, status, add_mem
expect(response).to have_gitlab_http_status(status)
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
end
diff --git a/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
index 3e058838773..e776cf13217 100644
--- a/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/container_repositories_shared_examples.rb
@@ -79,11 +79,3 @@ RSpec.shared_examples 'returns repositories for allowed users' do |user_type, sc
end
end
end
-
-RSpec.shared_examples 'a gitlab tracking event' do |category, action|
- it "creates a gitlab tracking event #{action}" do
- expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
-
- subject
- end
-end
diff --git a/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
new file mode 100644
index 00000000000..ec32cb4b2ff
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/debian_packages_shared_examples.rb
@@ -0,0 +1,309 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'Debian repository shared context' do |object_type|
+ before do
+ stub_feature_flags(debian_packages: true)
+ end
+
+ if object_type == :project
+ let(:project) { create(:project, :public) }
+ elsif object_type == :group
+ let(:group) { create(:group, :public) }
+ end
+
+ let(:user) { create(:user) }
+ let(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ let(:distribution) { 'bullseye' }
+ let(:component) { 'main' }
+ let(:architecture) { 'amd64' }
+ let(:source_package) { 'sample' }
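+ # Debian pool convention: sources starting with 'lib' are filed under a four-character prefix (e.g. 'libs'), all others under their first letter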
+ let(:letter) { source_package[0..2] == 'lib' ? source_package[0..3] : source_package[0] }
+ let(:package_name) { 'libsample0' }
+ let(:package_version) { '1.2.3~alpha2-1' }
+ let(:file_name) { "#{package_name}_#{package_version}_#{architecture}.deb" }
+
+ let(:method) { :get }
+
+ let(:workhorse_params) do
+ if method == :put
+ file_upload = fixture_file_upload("spec/fixtures/packages/debian/#{file_name}")
+ { file: file_upload }
+ else
+ {}
+ end
+ end
+
+ let(:params) { workhorse_params }
+
+ let(:auth_headers) { {} }
+ let(:workhorse_headers) do
+ if method == :put
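+ # PUT requests must look like they were forwarded by Workhorse, so sign the internal API JWT with the Workhorse secret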
+ workhorse_token = JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256')
+ { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => workhorse_token }
+ else
+ {}
+ end
+ end
+
+ let(:headers) { auth_headers.merge(workhorse_headers) }
+
+ let(:send_rewritten_field) { true }
+
+ subject do
+ if method == :put
+ workhorse_finalize(
+ api(url),
+ method: method,
+ file_key: :file,
+ params: params,
+ headers: headers,
+ send_rewritten_field: send_rewritten_field
+ )
+ else
+ send method, api(url), headers: headers, params: params
+ end
+ end
+end
+
+RSpec.shared_context 'Debian repository auth headers' do |user_role, user_token, auth_method = :token|
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+
+ let(:auth_headers) do
+ if user_role == :anonymous
+ {}
+ elsif auth_method == :token
+ { 'Private-Token' => token }
+ else
+ basic_auth_header(user.username, token)
+ end
+ end
+end
+
+RSpec.shared_context 'Debian repository project access' do |project_visibility_level, user_role, user_token, auth_method|
+ include_context 'Debian repository auth headers', user_role, user_token, auth_method do
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository GET request' do |user_role, add_member, status, body|
+ context "for user type #{user_role}" do
+ before do
+ project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ it "returns #{status}#{and_body}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository PUT request' do |user_role, add_member, status, body|
+ context "for user type #{user_role}" do
+ before do
+ project.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ if status == :created
+ it 'creates package files' do
+ pending "Debian package creation not implemented"
+ expect { subject }
+ .to change { project.packages.debian.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
+ else
+ it "returns #{status}#{and_body}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'rejects Debian access with unknown project id' do
+ context 'with an unknown project' do
+ let(:project) { double(id: non_existing_record_id) }
+
+ context 'as anonymous' do
+ it_behaves_like 'Debian project repository GET request', :anonymous, true, :not_found, nil
+ end
+
+ context 'as authenticated user' do
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
+
+ it_behaves_like 'Debian project repository GET request', :anonymous, true, :not_found, nil
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian project repository GET endpoint' do |success_status, success_body|
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ 'PUBLIC' | :developer | true | true | success_status | success_body
+ 'PUBLIC' | :guest | true | true | success_status | success_body
+ 'PUBLIC' | :developer | true | false | success_status | success_body
+ 'PUBLIC' | :guest | true | false | success_status | success_body
+ 'PUBLIC' | :developer | false | true | success_status | success_body
+ 'PUBLIC' | :guest | false | true | success_status | success_body
+ 'PUBLIC' | :developer | false | false | success_status | success_body
+ 'PUBLIC' | :guest | false | false | success_status | success_body
+ 'PUBLIC' | :anonymous | false | true | success_status | success_body
+ 'PRIVATE' | :developer | true | true | success_status | success_body
+ 'PRIVATE' | :guest | true | true | :forbidden | nil
+ 'PRIVATE' | :developer | true | false | :not_found | nil
+ 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | false | true | :not_found | nil
+ 'PRIVATE' | :guest | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :not_found | nil
+ 'PRIVATE' | :guest | false | false | :not_found | nil
+ 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ end
+
+ with_them do
+ include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
+ it_behaves_like 'Debian project repository GET request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ end
+ end
+ end
+
+ it_behaves_like 'rejects Debian access with unknown project id'
+end
+
+RSpec.shared_examples 'Debian project repository PUT endpoint' do |success_status, success_body|
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ 'PUBLIC' | :developer | true | true | success_status | nil
+ 'PUBLIC' | :guest | true | true | :forbidden | nil
+ 'PUBLIC' | :developer | true | false | :unauthorized | nil
+ 'PUBLIC' | :guest | true | false | :unauthorized | nil
+ 'PUBLIC' | :developer | false | true | :forbidden | nil
+ 'PUBLIC' | :guest | false | true | :forbidden | nil
+ 'PUBLIC' | :developer | false | false | :unauthorized | nil
+ 'PUBLIC' | :guest | false | false | :unauthorized | nil
+ 'PUBLIC' | :anonymous | false | true | :unauthorized | nil
+ 'PRIVATE' | :developer | true | true | success_status | nil
+ 'PRIVATE' | :guest | true | true | :forbidden | nil
+ 'PRIVATE' | :developer | true | false | :not_found | nil
+ 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | false | true | :not_found | nil
+ 'PRIVATE' | :guest | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :not_found | nil
+ 'PRIVATE' | :guest | false | false | :not_found | nil
+ 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ end
+
+ with_them do
+ include_context 'Debian repository project access', params[:project_visibility_level], params[:user_role], params[:user_token], :basic do
+ it_behaves_like 'Debian project repository PUT request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ end
+ end
+ end
+
+ it_behaves_like 'rejects Debian access with unknown project id'
+end
+
+RSpec.shared_context 'Debian repository group access' do |group_visibility_level, user_role, user_token, auth_method|
+ include_context 'Debian repository auth headers', user_role, user_token, auth_method do
+ before do
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel.const_get(group_visibility_level, false))
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian group repository GET request' do |user_role, add_member, status, body|
+ context "for user type #{user_role}" do
+ before do
+ group.send("add_#{user_role}", user) if add_member && user_role != :anonymous
+ end
+
+ and_body = body.nil? ? '' : ' and expected body'
+
+ it "returns #{status}#{and_body}" do
+ subject
+
+ expect(response).to have_gitlab_http_status(status)
+
+ unless body.nil?
+ expect(response.body).to eq(body)
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'rejects Debian access with unknown group id' do
+ context 'with an unknown group' do
+ let(:group) { double(id: non_existing_record_id) }
+
+ context 'as anonymous' do
+ it_behaves_like 'Debian group repository GET request', :anonymous, true, :not_found, nil
+ end
+
+ context 'as authenticated user' do
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
+
+ it_behaves_like 'Debian group repository GET request', :anonymous, true, :not_found, nil
+ end
+ end
+end
+
+RSpec.shared_examples 'Debian group repository GET endpoint' do |success_status, success_body|
+ context 'with valid group' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:group_visibility_level, :user_role, :member, :user_token, :expected_status, :expected_body) do
+ 'PUBLIC' | :developer | true | true | success_status | success_body
+ 'PUBLIC' | :guest | true | true | success_status | success_body
+ 'PUBLIC' | :developer | true | false | success_status | success_body
+ 'PUBLIC' | :guest | true | false | success_status | success_body
+ 'PUBLIC' | :developer | false | true | success_status | success_body
+ 'PUBLIC' | :guest | false | true | success_status | success_body
+ 'PUBLIC' | :developer | false | false | success_status | success_body
+ 'PUBLIC' | :guest | false | false | success_status | success_body
+ 'PUBLIC' | :anonymous | false | true | success_status | success_body
+ 'PRIVATE' | :developer | true | true | success_status | success_body
+ 'PRIVATE' | :guest | true | true | :forbidden | nil
+ 'PRIVATE' | :developer | true | false | :not_found | nil
+ 'PRIVATE' | :guest | true | false | :not_found | nil
+ 'PRIVATE' | :developer | false | true | :not_found | nil
+ 'PRIVATE' | :guest | false | true | :not_found | nil
+ 'PRIVATE' | :developer | false | false | :not_found | nil
+ 'PRIVATE' | :guest | false | false | :not_found | nil
+ 'PRIVATE' | :anonymous | false | true | :not_found | nil
+ end
+
+ with_them do
+ include_context 'Debian repository group access', params[:group_visibility_level], params[:user_role], params[:user_token], :basic do
+ it_behaves_like 'Debian group repository GET request', params[:user_role], params[:member], params[:expected_status], params[:expected_body]
+ end
+ end
+ end
+
+ it_behaves_like 'rejects Debian access with unknown group id'
+end
diff --git a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
index f26af6cb766..5145880ef9a 100644
--- a/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/group_and_project_boards_query_shared_examples.rb
@@ -90,7 +90,7 @@ RSpec.shared_examples 'group and project boards query' do
it_behaves_like 'a working graphql query' do
before do
- post_graphql(query_single_board, current_user: current_user)
+ post_graphql(query_single_board("id: \"gid://gitlab/Board/1\""), current_user: current_user)
end
end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 6aac51a5903..58e99776fd9 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -26,7 +26,7 @@ RSpec.shared_examples 'process nuget service index request' do |user_type, statu
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'nuget_service_index'
+ it_behaves_like 'a package tracking event', described_class.name, 'cli_metadata'
it 'returns a valid json response' do
subject
@@ -169,7 +169,7 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
context 'with correct params' do
it_behaves_like 'package workhorse uploads'
it_behaves_like 'creates nuget package files'
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
end
@@ -286,7 +286,7 @@ RSpec.shared_examples 'process nuget download content request' do |user_type, st
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
it 'returns a valid package archive' do
subject
@@ -336,7 +336,7 @@ RSpec.shared_examples 'process nuget search request' do |user_type, status, add_
it_behaves_like 'returns a valid json search response', status, 4, [1, 5, 5, 1]
- it_behaves_like 'a gitlab tracking event', described_class.name, 'search_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'search_package'
context 'with skip set to 2' do
let(:skip) { 2 }
diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
index c9a33701161..d730ed53109 100644
--- a/spec/support/shared_examples/requests/api/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
@@ -126,3 +126,11 @@ RSpec.shared_examples 'job token for package uploads' do
end
end
end
+
+RSpec.shared_examples 'a package tracking event' do |category, action|
+ it "creates a gitlab tracking event #{action}" do
+ expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
+
+ expect { subject }.to change { Packages::Event.count }.by(1)
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index 715c494840e..40bedc84366 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -52,7 +52,7 @@ RSpec.shared_examples 'PyPi package creation' do |user_type, status, add_member
context 'with correct params' do
it_behaves_like 'package workhorse uploads'
it_behaves_like 'creating pypi package files'
- it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'push_package'
end
end
@@ -119,7 +119,7 @@ RSpec.shared_examples 'PyPi package versions' do |user_type, status, add_member
end
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'list_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'list_package'
end
end
@@ -136,7 +136,7 @@ RSpec.shared_examples 'PyPi package download' do |user_type, status, add_member
end
it_behaves_like 'returning response status', status
- it_behaves_like 'a gitlab tracking event', described_class.name, 'pull_package'
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
end
diff --git a/spec/support/shared_examples/requests/api/tracking_shared_examples.rb b/spec/support/shared_examples/requests/api/tracking_shared_examples.rb
new file mode 100644
index 00000000000..2e6feae3f98
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/tracking_shared_examples.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a gitlab tracking event' do |category, action|
+ it "creates a gitlab tracking event #{action}" do
+ expect(Gitlab::Tracking).to receive(:event).with(category, action, {})
+
+ subject
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb b/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
index fced2e59ace..f28c78aec97 100644
--- a/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/boards_create_service_shared_examples.rb
@@ -7,7 +7,7 @@ RSpec.shared_examples 'boards create service' do
end
it 'creates the default lists' do
- board = service.execute
+ board = service.execute.payload
expect(board.lists.size).to eq 2
expect(board.lists.first).to be_backlog
diff --git a/spec/support/shared_examples/services/merge_request_shared_examples.rb b/spec/support/shared_examples/services/merge_request_shared_examples.rb
index a7032640217..2bd06ac3e9c 100644
--- a/spec/support/shared_examples/services/merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/services/merge_request_shared_examples.rb
@@ -13,11 +13,10 @@ RSpec.shared_examples 'reviewer_ids filter' do
end
context 'with reviewer_ids' do
- let(:reviewer_ids_param) { { reviewer_ids: [reviewer1.id, reviewer2.id, reviewer3.id] } }
+ let(:reviewer_ids_param) { { reviewer_ids: [reviewer1.id, reviewer2.id] } }
let(:reviewer1) { create(:user) }
let(:reviewer2) { create(:user) }
- let(:reviewer3) { create(:user) }
context 'when the current user can admin the merge_request' do
context 'when merge_request_reviewer feature is enabled' do
@@ -25,14 +24,13 @@ RSpec.shared_examples 'reviewer_ids filter' do
stub_feature_flags(merge_request_reviewer: true)
end
- context 'with reviewers who can read the merge_request' do
+ context 'with a reviewer who can read the merge_request' do
before do
project.add_developer(reviewer1)
- project.add_developer(reviewer2)
end
it 'contains reviewers who can read the merge_request' do
- expect(execute.reviewers).to contain_exactly(reviewer1, reviewer2)
+ expect(execute.reviewers).to contain_exactly(reviewer1)
end
end
end
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 7fd59c3d963..c00a087311c 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -170,6 +170,7 @@ RSpec.shared_examples 'filters on each package_type' do |is_project: false|
let_it_be(:package5) { create(:pypi_package, project: project) }
let_it_be(:package6) { create(:composer_package, project: project) }
let_it_be(:package7) { create(:generic_package, project: project) }
+ let_it_be(:package8) { create(:golang_package, project: project) }
Packages::Package.package_types.keys.each do |package_type|
context "for package type #{package_type}" do
diff --git a/spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb b/spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb
new file mode 100644
index 00000000000..df8b1f91629
--- /dev/null
+++ b/spec/support/shared_examples/services/projects/urls_with_escaped_elements_shared_example.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+# Shared examples that test requests against URLs with escaped elements
+#
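+# Hypothetical usage sketch (the including spec is assumed to define `result` in terms of `url`):
+#
+#   it_behaves_like "URLs containing escaped elements return expected status" do
+#     let(:result) { service_under_test.call(url) } # placeholder for the service being exercised
+#   end
+#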
+RSpec.shared_examples "URLs containing escaped elements return expected status" do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:url, :result_status) do
+ "https://user:0a%23@test.example.com/project.git" | :success
+ "https://git.example.com:1%2F%2F@source.developers.google.com/project.git" | :success
+ CGI.escape("git://localhost:1234/some-path?some-query=some-val\#@example.com/") | :error
+ CGI.escape(CGI.escape("https://user:0a%23@test.example.com/project.git")) | :error
+ end
+
+ with_them do
+ it "returns expected status" do
+ expect(result[:status]).to eq(result_status)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb b/spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb
new file mode 100644
index 00000000000..5680d4f772c
--- /dev/null
+++ b/spec/support/shared_examples/validators/ip_address_validator_shared_examples.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'validates IP address' do
+ subject { object }
+
+ it { is_expected.to allow_value('192.168.17.43').for(attribute.to_sym) }
+ it { is_expected.to allow_value('2001:0db8:85a3:0000:0000:8a2e:0370:7334').for(attribute.to_sym) }
+
+ it { is_expected.not_to allow_value('invalid IP').for(attribute.to_sym) }
+end
diff --git a/spec/support_specs/helpers/stub_feature_flags_spec.rb b/spec/support_specs/helpers/stub_feature_flags_spec.rb
index 5d1e4e1627d..7434929d665 100644
--- a/spec/support_specs/helpers/stub_feature_flags_spec.rb
+++ b/spec/support_specs/helpers/stub_feature_flags_spec.rb
@@ -3,12 +3,31 @@
require 'spec_helper'
RSpec.describe StubFeatureFlags do
- let(:feature_name) { :test_feature }
+ DUMMY_FEATURE_FLAG = :dummy_feature_flag # rubocop:disable RSpec/LeakyConstantDeclaration
+
+ # We inject a dummy feature flag definition
+ # to ensure that its usage is also strongly validated
+ before(:all) do
+ definition = Feature::Definition.new(
+ nil,
+ name: DUMMY_FEATURE_FLAG,
+ type: 'development',
+ # we allow ambiguous usage of `default_enabled:`
+ default_enabled: [false, true]
+ )
+
+ Feature::Definition.definitions[DUMMY_FEATURE_FLAG] = definition
+ end
+
+ after(:all) do
+ Feature::Definition.definitions.delete(DUMMY_FEATURE_FLAG)
+ end
describe '#stub_feature_flags' do
using RSpec::Parameterized::TableSyntax
- let(:feature_name) { :test_feature }
+ let(:feature_name) { DUMMY_FEATURE_FLAG }
context 'when checking global state' do
where(:feature_actors, :expected_result) do
@@ -121,14 +140,14 @@ RSpec.describe StubFeatureFlags do
describe 'stub timing' do
context 'let_it_be variable' do
- let_it_be(:let_it_be_var) { Feature.enabled?(:any_feature_flag) }
+ let_it_be(:let_it_be_var) { Feature.enabled?(DUMMY_FEATURE_FLAG) }
it { expect(let_it_be_var).to eq true }
end
context 'before_all variable' do
before_all do
- @suite_var = Feature.enabled?(:any_feature_flag)
+ @suite_var = Feature.enabled?(DUMMY_FEATURE_FLAG)
end
it { expect(@suite_var).to eq true }
@@ -136,14 +155,14 @@ RSpec.describe StubFeatureFlags do
context 'before(:all) variable' do
before(:all) do
- @suite_var = Feature.enabled?(:any_feature_flag)
+ @suite_var = Feature.enabled?(DUMMY_FEATURE_FLAG)
end
it { expect(@suite_var).to eq true }
end
context 'with stub_feature_flags meta' do
- let(:var) { Feature.enabled?(:any_feature_flag) }
+ let(:var) { Feature.enabled?(DUMMY_FEATURE_FLAG) }
context 'as true', :stub_feature_flags do
it { expect(var).to eq true }
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index a2cc2b12e5e..212a7f74b40 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -325,11 +325,13 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
let!(:project_a) { create(:project, :repository) }
+ let!(:project_a_wiki_page) { create(:wiki_page, container: project_a) }
+ let!(:project_a_design) { create(:design, :with_file, issue: create(:issue, project: project_a)) }
let!(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }
let!(:b_storage_dir) { File.join(test_second_storage_dir, File.dirname(project_b.disk_path)) }
- context 'no concurrency' do
- it 'includes repositories in all repository storages' do
+ shared_examples 'includes repositories in all repository storages' do
+ specify :aggregate_failures do
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
tar_contents, exit_status = Gitlab::Popen.popen(
@@ -337,27 +339,25 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
)
expect(exit_status).to eq(0)
- expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
- expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
+ expect(tar_contents).to include(
+ "repositories/#{project_a.disk_path}.bundle",
+ "repositories/#{project_a.disk_path}.wiki.bundle",
+ "repositories/#{project_a.disk_path}.design.bundle",
+ "repositories/#{project_b.disk_path}.bundle"
+ )
end
end
+ context 'no concurrency' do
+ it_behaves_like 'includes repositories in all repository storages'
+ end
+
context 'with concurrency' do
before do
stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 4)
end
- it 'includes repositories in all repository storages' do
- expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
-
- tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} repositories}
- )
-
- expect(exit_status).to eq(0)
- expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
- expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
- end
+ it_behaves_like 'includes repositories in all repository storages'
end
end
@@ -370,7 +370,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
it 'has defaults' do
- expect_next_instance_of(::Backup::Repository) do |instance|
+ expect_next_instance_of(::Backup::Repositories) do |instance|
expect(instance).to receive(:dump)
.with(max_concurrency: 1, max_storage_concurrency: 1)
.and_call_original
@@ -383,7 +383,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 5)
stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
- expect_next_instance_of(::Backup::Repository) do |instance|
+ expect_next_instance_of(::Backup::Repositories) do |instance|
expect(instance).to receive(:dump)
.with(max_concurrency: 5, max_storage_concurrency: 2)
.and_call_original
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 99efd394e83..e4630aefb85 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -164,25 +164,77 @@ RSpec.describe 'gitlab:db namespace rake task' do
end
end
+ describe 'drop_tables' do
+ subject { run_rake_task('gitlab:db:drop_tables') }
+
+ let(:tables) { %w(one two) }
+ let(:views) { %w(three four) }
+ let(:connection) { ActiveRecord::Base.connection }
+
+ before do
+ allow(connection).to receive(:execute).and_return(nil)
+
+ allow(connection).to receive(:tables).and_return(tables)
+ allow(connection).to receive(:views).and_return(views)
+ end
+
+ it 'drops all tables, except schema_migrations' do
+ expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "one" CASCADE')
+ expect(connection).to receive(:execute).with('DROP TABLE IF EXISTS "two" CASCADE')
+
+ subject
+ end
+
+ it 'drops all views' do
+ expect(connection).to receive(:execute).with('DROP VIEW IF EXISTS "three" CASCADE')
+ expect(connection).to receive(:execute).with('DROP VIEW IF EXISTS "four" CASCADE')
+
+ subject
+ end
+
+ it 'truncates schema_migrations table' do
+ expect(connection).to receive(:execute).with('TRUNCATE schema_migrations')
+
+ subject
+ end
+
+ it 'drops extra schemas' do
+ Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
+ expect(connection).to receive(:execute).with("DROP SCHEMA IF EXISTS \"#{schema}\"")
+ end
+
+ subject
+ end
+ end
+
describe 'reindex' do
+ let(:reindex) { double('reindex') }
+ let(:indexes) { double('indexes') }
+
context 'when no index_name is given' do
- it 'raises an error' do
- expect do
- run_rake_task('gitlab:db:reindex', '')
- end.to raise_error(ArgumentError, /must give the index name/)
+ it 'rebuilds a random number of large indexes' do
+ expect(Gitlab::Database::Reindexing).to receive_message_chain('candidate_indexes.random_few').and_return(indexes)
+ expect(Gitlab::Database::Reindexing).to receive(:perform).with(indexes)
+
+ run_rake_task('gitlab:db:reindex')
end
end
- it 'calls the index rebuilder with the proper arguments' do
- reindex = double('rebuilder')
+ context 'with index name given' do
+ let(:index) { double('index') }
- expect(Gitlab::Database::ConcurrentReindex).to receive(:new)
- .with('some_index_name', logger: instance_of(Logger))
- .and_return(reindex)
+ it 'calls the index rebuilder with the proper arguments' do
+ expect(Gitlab::Database::PostgresIndex).to receive(:by_identifier).with('public.foo_idx').and_return(index)
+ expect(Gitlab::Database::Reindexing).to receive(:perform).with(index)
- expect(reindex).to receive(:execute)
+ run_rake_task('gitlab:db:reindex', '[public.foo_idx]')
+ end
+
+ it 'raises an error if the index does not exist' do
+ expect(Gitlab::Database::PostgresIndex).to receive(:by_identifier).with('public.absent_index').and_raise(ActiveRecord::RecordNotFound)
- run_rake_task('gitlab:db:reindex', '[some_index_name]')
+ expect { run_rake_task('gitlab:db:reindex', '[public.absent_index]') }.to raise_error(ActiveRecord::RecordNotFound)
+ end
end
end
diff --git a/spec/uploaders/pages/deployment_uploader_spec.rb b/spec/uploaders/pages/deployment_uploader_spec.rb
new file mode 100644
index 00000000000..1d9a0efe228
--- /dev/null
+++ b/spec/uploaders/pages/deployment_uploader_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Pages::DeploymentUploader do
+ let(:pages_deployment) { create(:pages_deployment) }
+ let(:uploader) { described_class.new(pages_deployment, :file) }
+
+ subject { uploader }
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[/\h{2}/\h{2}/\h{64}/pages_deployments/\d+],
+ cache_dir: %r[pages/@hashed/tmp/cache],
+ work_dir: %r[pages/@hashed/tmp/work]
+
+ context 'when object store is REMOTE' do
+ before do
+ stub_pages_object_storage
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths', store_dir: %r[\A\h{2}/\h{2}/\h{64}/pages_deployments/\d+\z]
+ end
+
+ context 'when file is stored in valid local_path' do
+ let(:file) do
+ fixture_file_upload("spec/fixtures/pages.zip")
+ end
+
+ before do
+ uploader.store!(file)
+ end
+
+ subject { uploader.file.path }
+
+ it { is_expected.to match(%r[#{uploader.root}/@hashed/\h{2}/\h{2}/\h{64}/pages_deployments/#{pages_deployment.id}/pages.zip]) }
+ end
+end
diff --git a/spec/validators/ip_address_validator_spec.rb b/spec/validators/ip_address_validator_spec.rb
new file mode 100644
index 00000000000..382250378c2
--- /dev/null
+++ b/spec/validators/ip_address_validator_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IpAddressValidator do
+ let(:model) do
+ Class.new do
+ include ActiveModel::Model
+ include ActiveModel::Validations
+
+ attr_accessor :ip_address
+ alias_method :ip_address_before_type_cast, :ip_address
+
+ validates :ip_address, ip_address: true
+ end.new
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:ip_address, :validity, :errors) do
+ 'invalid IP' | false | { ip_address: ['must be a valid IPv4 or IPv6 address'] }
+ '192.168.17.43' | true | {}
+ '2001:0db8:85a3::8a2e:0370:7334' | true | {}
+ nil | true | {}
+ '' | true | {}
+ end
+
+ with_them do
+ before do
+ model.ip_address = ip_address
+ model.validate
+ end
+
+ it { expect(model.valid?).to eq(validity) }
+ it { expect(model.errors.messages).to eq(errors) }
+ end
+end
diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb
index 70fb77944cc..e9223c84674 100644
--- a/spec/views/admin/dashboard/index.html.haml_spec.rb
+++ b/spec/views/admin/dashboard/index.html.haml_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe 'admin/dashboard/index.html.haml' do
render
expect(rendered).not_to have_content "Users in License"
- expect(rendered).not_to have_content "Active Users"
+ expect(rendered).not_to have_content "Billable Users"
expect(rendered).not_to have_content "Maximum Users"
expect(rendered).not_to have_content "Users over License"
end
diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
index 777dc0c8571..2c37565328a 100644
--- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
@@ -92,7 +92,11 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do
end
context 'on settings' do
+ let(:gitlab_com?) { false }
+
before do
+ allow(::Gitlab).to receive(:com?) { gitlab_com? }
+
render
end
@@ -100,6 +104,20 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do
expect(rendered).to have_link('General', href: general_admin_application_settings_path)
end
+ context 'when GitLab.com' do
+ let(:gitlab_com?) { true }
+
+ it 'does not include Integrations link' do
+ expect(rendered).not_to have_link('Integrations', href: integrations_admin_application_settings_path)
+ end
+ end
+
+ context 'when not GitLab.com' do
+ it 'includes Integrations link' do
+ expect(rendered).to have_link('Integrations', href: integrations_admin_application_settings_path)
+ end
+ end
+
context 'when GitLab FOSS' do
it 'does not include Templates link' do
expect(rendered).not_to have_link('Templates', href: '/admin/application_settings/templates')
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index 1b8b28367c1..1dae953227d 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -20,6 +20,14 @@ RSpec.describe 'profiles/preferences/show' do
it 'has an id for anchoring' do
expect(rendered).to have_css('#navigation-theme')
end
+
+ it 'has correct stylesheet tags' do
+ Gitlab::Themes.each do |theme|
+ next unless theme.css_filename
+
+ expect(rendered).to have_selector("link[href*=\"themes/#{theme.css_filename}\"]", visible: false)
+ end
+ end
end
context 'syntax highlighting theme' do
diff --git a/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb b/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb
deleted file mode 100644
index 7cdc817d784..00000000000
--- a/spec/views/projects/merge_requests/diffs/_diffs.html.haml_spec.rb
+++ /dev/null
@@ -1,38 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/merge_requests/diffs/_diffs.html.haml' do
- include Devise::Test::ControllerHelpers
-
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
- let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project, author: user) }
-
- before do
- allow(view).to receive(:url_for).and_return(controller.request.fullpath)
-
- assign(:merge_request, merge_request)
- assign(:environment, merge_request.environments_for(user).last)
- assign(:diffs, merge_request.diffs)
- assign(:merge_request_diffs, merge_request.diffs)
- assign(:diff_notes_disabled, true) # disable note creation
- assign(:use_legacy_diff_notes, false)
- assign(:grouped_diff_discussions, {})
- assign(:notes, [])
- end
-
- context 'for a commit' do
- let(:commit) { merge_request.commits.last }
-
- before do
- assign(:commit, commit)
- end
-
- it "shows the commit scope" do
- render
-
- expect(rendered).to have_content "Only comments from the following commit are shown below"
- end
- end
-end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 9e95dc40ff8..033b2304e33 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -60,6 +60,28 @@ RSpec.describe 'search/_results' do
expect(rendered).to have_selector('#js-search-filter-by-state')
end
+
+ context 'Feature search_filter_by_confidential' do
+ context 'when disabled' do
+ before do
+ stub_feature_flags(search_filter_by_confidential: false)
+ end
+
+ it 'does not render the confidential drop down' do
+ render
+
+ expect(rendered).not_to have_selector('#js-search-filter-by-confidential')
+ end
+ end
+
+ context 'when enabled' do
+ it 'renders the confidential drop down' do
+ render
+
+ expect(rendered).to have_selector('#js-search-filter-by-confidential')
+ end
+ end
+ end
end
end
end
diff --git a/spec/views/shared/milestones/_issuables.html.haml_spec.rb b/spec/views/shared/milestones/_issuables.html.haml_spec.rb
index 70ab6914580..5eed2c96a45 100644
--- a/spec/views/shared/milestones/_issuables.html.haml_spec.rb
+++ b/spec/views/shared/milestones/_issuables.html.haml_spec.rb
@@ -6,8 +6,7 @@ RSpec.describe 'shared/milestones/_issuables.html.haml' do
let(:issuables_size) { 100 }
before do
- allow(view).to receive_messages(title: nil, id: nil, show_project_name: nil,
- show_full_project_name: nil, dom_class: '',
+ allow(view).to receive_messages(title: nil, id: nil, show_project_name: nil, dom_class: '',
issuables: double(length: issuables_size).as_null_object)
stub_template 'shared/milestones/_issuable.html.haml' => ''
diff --git a/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb b/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb
index 8db86071dc4..667ec0bcb75 100644
--- a/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb
+++ b/spec/workers/analytics/instance_statistics/counter_job_worker_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do
it 'counts a scope and stores the result' do
subject
- measurement = Analytics::InstanceStatistics::Measurement.first
+ measurement = Analytics::InstanceStatistics::Measurement.users.first
expect(measurement.recorded_at).to be_like_time(recorded_at)
expect(measurement.identifier).to eq('users')
expect(measurement.count).to eq(2)
@@ -33,7 +33,7 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do
it 'sets 0 as the count' do
subject
- measurement = Analytics::InstanceStatistics::Measurement.first
+ measurement = Analytics::InstanceStatistics::Measurement.groups.first
expect(measurement.recorded_at).to be_like_time(recorded_at)
expect(measurement.identifier).to eq('groups')
expect(measurement.count).to eq(0)
@@ -51,4 +51,20 @@ RSpec.describe Analytics::InstanceStatistics::CounterJobWorker do
expect { subject }.not_to change { Analytics::InstanceStatistics::Measurement.count }
end
+
+ context 'when pipelines_succeeded identifier is passed' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :success) }
+
+ let(:successful_pipelines_measurement_identifier) { ::Analytics::InstanceStatistics::Measurement.identifiers.fetch(:pipelines_succeeded) }
+ let(:job_args) { [successful_pipelines_measurement_identifier, pipeline.id, pipeline.id, recorded_at] }
+
+ it 'counts successful pipelines' do
+ subject
+
+ measurement = Analytics::InstanceStatistics::Measurement.pipelines_succeeded.first
+ expect(measurement.recorded_at).to be_like_time(recorded_at)
+ expect(measurement.identifier).to eq('pipelines_succeeded')
+ expect(measurement.count).to eq(1)
+ end
+ end
end
diff --git a/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb b/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb
index 2d633828ae3..9d4d48d0568 100644
--- a/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb
+++ b/spec/workers/authorized_project_update/periodic_recalculate_worker_spec.rb
@@ -11,17 +11,5 @@ RSpec.describe AuthorizedProjectUpdate::PeriodicRecalculateWorker do
subject.perform
end
-
- context 'feature flag :periodic_project_authorization_recalculation is disabled' do
- before do
- stub_feature_flags(periodic_project_authorization_recalculation: false)
- end
-
- it 'does not call AuthorizedProjectUpdate::PeriodicRecalculateService' do
- expect(AuthorizedProjectUpdate::PeriodicRecalculateService).not_to receive(:new)
-
- subject.perform
- end
- end
end
end
diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
index c49e4c453bf..a27c431523e 100644
--- a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
@@ -14,17 +14,5 @@ RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker do
subject.perform(start_user_id, end_user_id)
end
-
- context 'feature flag :periodic_project_authorization_recalculation is disabled' do
- before do
- stub_feature_flags(periodic_project_authorization_recalculation: false)
- end
-
- it 'does not call AuthorizedProjectUpdate::RecalculateForUserRangeService' do
- expect(AuthorizedProjectUpdate::RecalculateForUserRangeService).not_to receive(:new)
-
- subject.perform(start_user_id, end_user_id)
- end
- end
end
end
diff --git a/spec/workers/cleanup_container_repository_worker_spec.rb b/spec/workers/cleanup_container_repository_worker_spec.rb
index 0545f7a35e4..9cf8974a2a1 100644
--- a/spec/workers/cleanup_container_repository_worker_spec.rb
+++ b/spec/workers/cleanup_container_repository_worker_spec.rb
@@ -40,14 +40,35 @@ RSpec.describe CleanupContainerRepositoryWorker, :clean_gitlab_redis_shared_stat
context 'container expiration policy' do
let(:params) { { key: 'value', 'container_expiration_policy' => true } }
+ before do
+ allow(ContainerRepository)
+ .to receive(:find_by_id).with(repository.id).and_return(repository)
+ end
+
it 'executes the destroy service' do
+ expect(repository).to receive(:start_expiration_policy!).and_call_original
+ expect(repository).to receive(:reset_expiration_policy_started_at!).and_call_original
expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
.with(project, nil, params.merge('container_expiration_policy' => true))
.and_return(service)
- expect(service).to receive(:execute)
+ expect(service).to receive(:execute).and_return(status: :success)
+
+ subject.perform(nil, repository.id, params)
+ expect(repository.reload.expiration_policy_started_at).to be_nil
+ end
+
+ it "doesn't reset the expiration policy started at if the destroy service returns an error" do
+ expect(repository).to receive(:start_expiration_policy!).and_call_original
+ expect(repository).not_to receive(:reset_expiration_policy_started_at!)
+ expect(Projects::ContainerRepository::CleanupTagsService).to receive(:new)
+ .with(project, nil, params.merge('container_expiration_policy' => true))
+ .and_return(service)
+
+ expect(service).to receive(:execute).and_return(status: :error, message: 'timeout while deleting tags')
subject.perform(nil, repository.id, params)
+ expect(repository.reload.expiration_policy_started_at).not_to be_nil
end
end
end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
new file mode 100644
index 00000000000..2c79f347903
--- /dev/null
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
+ let(:job_tracker) do
+ described_class.new('namespace')
+ end
+
+ describe '#register' do
+ it 'adds jid to the set' do
+ job_tracker.register('a-job-id')
+
+ expect(job_tracker.running_jids).to contain_exactly('a-job-id')
+ end
+
+ it 'updates the counter' do
+ expect { job_tracker.register('a-job-id') }
+ .to change { job_tracker.count }
+ .from(0)
+ .to(1)
+ end
+
+ it 'uses only one Redis call' do
+ expect(job_tracker).to receive(:with_redis).once.and_call_original
+
+ job_tracker.register('a-job-id')
+ end
+ end
+
+ describe '#remove' do
+ before do
+ job_tracker.register(%w[a-job-id other-job-id])
+ end
+
+ it 'removes jid from the set' do
+ job_tracker.remove('other-job-id')
+
+ expect(job_tracker.running_jids).to contain_exactly('a-job-id')
+ end
+
+ it 'updates the counter' do
+ expect { job_tracker.remove('other-job-id') }
+ .to change { job_tracker.count }
+ .from(2)
+ .to(1)
+ end
+
+ it 'uses only one Redis call' do
+ expect(job_tracker).to receive(:with_redis).once.and_call_original
+
+ job_tracker.remove('other-job-id')
+ end
+ end
+
+ describe '#clean_up' do
+ before do
+ job_tracker.register('a-job-id')
+ end
+
+ context 'with running jobs' do
+ before do
+ expect(Gitlab::SidekiqStatus).to receive(:completed_jids)
+ .with(%w[a-job-id])
+ .and_return([])
+ end
+
+ it 'does not remove the jid from the set' do
+ expect { job_tracker.clean_up }
+ .not_to change { job_tracker.running_jids.include?('a-job-id') }
+ end
+
+ it 'does only one Redis call to get the job ids' do
+ expect(job_tracker).to receive(:with_redis).once.and_call_original
+
+ job_tracker.clean_up
+ end
+ end
+
+ context 'with completed jobs' do
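+ # no Sidekiq status key exists for 'a-job-id', so it is treated as completed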
+ it 'removes the jid from the set' do
+ expect { job_tracker.clean_up }
+ .to change { job_tracker.running_jids.include?('a-job-id') }
+ end
+
+ it 'updates the counter' do
+ expect { job_tracker.clean_up }
+ .to change { job_tracker.count }
+ .from(1)
+ .to(0)
+ end
+
+ it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do
+ expect(job_tracker).to receive(:with_redis).twice.and_call_original
+
+ job_tracker.clean_up
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/limited_capacity/worker_spec.rb b/spec/workers/concerns/limited_capacity/worker_spec.rb
new file mode 100644
index 00000000000..8a15675c04d
--- /dev/null
+++ b/spec/workers/concerns/limited_capacity/worker_spec.rb
@@ -0,0 +1,285 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_failures do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+ include LimitedCapacity::Worker
+ end
+ end
+
+ let(:worker) { worker_class.new }
+
+ let(:job_tracker) do
+ LimitedCapacity::JobTracker.new(worker_class.name)
+ end
+
+ before do
+ worker.jid = 'my-jid'
+ allow(worker).to receive(:job_tracker).and_return(job_tracker)
+ end
+
+ describe 'required methods' do
+ it { expect { worker.perform_work }.to raise_error(NotImplementedError) }
+ it { expect { worker.remaining_work_count }.to raise_error(NotImplementedError) }
+ it { expect { worker.max_running_jobs }.to raise_error(NotImplementedError) }
+ end
+
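+ # Retries and deduplication are presumably disabled because the worker re-enqueues itself and tracks running jids manually.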
+ describe 'Sidekiq options' do
+ it 'does not retry failed jobs' do
+ expect(worker_class.sidekiq_options['retry']).to eq(0)
+ end
+
+ it 'does not deduplicate jobs' do
+ expect(worker_class.get_deduplicate_strategy).to eq(:none)
+ end
+ end
+
+ describe '.perform_with_capacity' do
+ subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) }
+
+ before do
+ expect_next_instance_of(worker_class) do |instance|
+ expect(instance).to receive(:remove_failed_jobs)
+ expect(instance).to receive(:report_prometheus_metrics)
+
+ allow(instance).to receive(:remaining_work_count).and_return(remaining_work_count)
+ allow(instance).to receive(:remaining_capacity).and_return(remaining_capacity)
+ end
+ end
+
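+ # The two contexts below assume the worker enqueues min(remaining_work_count, remaining_capacity) jobs.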
+ context 'when capacity is larger than work' do
+ let(:remaining_work_count) { 2 }
+ let(:remaining_capacity) { 3 }
+
+ it 'enqueues jobs for remaining work' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg]])
+
+ perform_with_capacity
+ end
+ end
+
+ context 'when capacity is lower than work' do
+ let(:remaining_work_count) { 5 }
+ let(:remaining_capacity) { 3 }
+
+ it 'enqueues jobs for remaining work' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg], [:arg]])
+
+ perform_with_capacity
+ end
+ end
+ end
+
+ describe '#perform' do
+ subject(:perform) { worker.perform(:arg) }
+
+ context 'with capacity' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(worker).to receive(:running_jobs_count).and_return(0)
+ allow(worker).to receive(:remaining_work_count).and_return(0)
+ end
+
+ it 'calls perform_work' do
+ expect(worker).to receive(:perform_work).with(:arg)
+
+ perform
+ end
+
+ it 're-enqueues itself' do
+ allow(worker).to receive(:perform_work)
+ expect(worker).to receive(:re_enqueue).with(:arg)
+
+ perform
+ end
+
+ it 'registers itself in the running set' do
+ allow(worker).to receive(:perform_work)
+ expect(job_tracker).to receive(:register).with('my-jid')
+
+ perform
+ end
+
+ it 'removes itself from the running set' do
+ expect(job_tracker).to receive(:remove).with('my-jid')
+
+ allow(worker).to receive(:perform_work)
+
+ perform
+ end
+
+ it 'reports prometheus metrics' do
+ allow(worker).to receive(:perform_work)
+ expect(worker).to receive(:report_prometheus_metrics)
+
+ perform
+ end
+ end
+
+ context 'with capacity and without work' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(worker).to receive(:running_jobs_count).and_return(0)
+ allow(worker).to receive(:remaining_work_count).and_return(0)
+ allow(worker).to receive(:perform_work)
+ end
+
+ it 'does not re-enqueue itself' do
+ expect(worker_class).not_to receive(:perform_async)
+
+ perform
+ end
+ end
+
+ context 'without capacity' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(worker).to receive(:running_jobs_count).and_return(15)
+ allow(worker).to receive(:remaining_work_count).and_return(10)
+ end
+
+ it 'does not call perform_work' do
+ expect(worker).not_to receive(:perform_work)
+
+ perform
+ end
+
+ it 'does not re-enqueue itself' do
+ expect(worker_class).not_to receive(:perform_async)
+
+ perform
+ end
+
+ it 'does not register in the running set' do
+ expect(job_tracker).not_to receive(:register)
+
+ perform
+ end
+
+ it 'removes itself from the running set' do
+ expect(job_tracker).to receive(:remove).with('my-jid')
+
+ perform
+ end
+
+ it 'reports prometheus metrics' do
+ expect(worker).to receive(:report_prometheus_metrics)
+
+ perform
+ end
+ end
+
+ context 'when perform_work fails' do
+ it 'does not re-enqueue itself' do
+ expect(worker).not_to receive(:re_enqueue)
+
+ expect { perform }.to raise_error(NotImplementedError)
+ end
+
+ it 'removes itself from the running set' do
+ expect(job_tracker).to receive(:remove)
+
+ expect { perform }.to raise_error(NotImplementedError)
+ end
+
+ it 'reports prometheus metrics' do
+ expect(worker).to receive(:report_prometheus_metrics)
+
+ expect { perform }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+
+ describe '#remaining_capacity' do
+ subject(:remaining_capacity) { worker.remaining_capacity }
+
+ before do
+ expect(worker).to receive(:max_running_jobs).and_return(max_capacity)
+ end
+
+ context 'when changing the capacity to a lower value' do
+ let(:max_capacity) { -1 }
+
+ it { expect(remaining_capacity).to eq(0) }
+ end
+
+ context 'when registering new jobs' do
+ let(:max_capacity) { 2 }
+
+ before do
+ job_tracker.register('a-job-id')
+ end
+
+ it { expect(remaining_capacity).to eq(1) }
+ end
+
+ context 'with jobs in the queue' do
+ let(:max_capacity) { 2 }
+
+ before do
+ expect(worker_class).to receive(:queue_size).and_return(1)
+ end
+
+ it { expect(remaining_capacity).to eq(1) }
+ end
+
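+ # Assumed calculation: remaining_capacity = max_running_jobs - running_jobs_count - queue_size, so 10 - 3 - 5 = 2 below.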
+ context 'with both running jobs and queued jobs' do
+ let(:max_capacity) { 10 }
+
+ before do
+ expect(worker_class).to receive(:queue_size).and_return(5)
+ expect(worker).to receive(:running_jobs_count).and_return(3)
+ end
+
+ it { expect(remaining_capacity).to eq(2) }
+ end
+ end
+
+ describe '#remove_failed_jobs' do
+ subject(:remove_failed_jobs) { worker.remove_failed_jobs }
+
+ before do
+ job_tracker.register('a-job-id')
+ allow(worker).to receive(:max_running_jobs).and_return(2)
+
+ expect(job_tracker).to receive(:clean_up).and_call_original
+ end
+
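+ # 'a-job-id' has no live Sidekiq job behind it, so clean_up is presumably expected to discard it and free one slot of capacity.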
+ context 'with failed jobs' do
+ it 'updates the available capacity' do
+ expect { remove_failed_jobs }.to change { worker.remaining_capacity }.by(1)
+ end
+ end
+ end
+
+ describe '#report_prometheus_metrics' do
+ subject(:report_prometheus_metrics) { worker.report_prometheus_metrics }
+
+ before do
+ allow(worker).to receive(:running_jobs_count).and_return(5)
+ allow(worker).to receive(:max_running_jobs).and_return(7)
+ allow(worker).to receive(:remaining_work_count).and_return(9)
+ end
+
+ it 'reports number of running jobs' do
+ labels = { worker: 'DummyWorker' }
+
+ report_prometheus_metrics
+
+ expect(Gitlab::Metrics.registry.get(:limited_capacity_worker_running_jobs).get(labels)).to eq(5)
+ expect(Gitlab::Metrics.registry.get(:limited_capacity_worker_max_running_jobs).get(labels)).to eq(7)
+ expect(Gitlab::Metrics.registry.get(:limited_capacity_worker_remaining_work_count).get(labels)).to eq(9)
+ end
+ end
+end
diff --git a/spec/workers/design_management/copy_design_collection_worker_spec.rb b/spec/workers/design_management/copy_design_collection_worker_spec.rb
new file mode 100644
index 00000000000..45bfc47ca7e
--- /dev/null
+++ b/spec/workers/design_management/copy_design_collection_worker_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DesignManagement::CopyDesignCollectionWorker, :clean_gitlab_redis_shared_state do
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:target_issue) { create(:issue) }
+
+ subject { described_class.new.perform(user.id, issue.id, target_issue.id) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [user.id, issue.id, target_issue.id] }
+
+ specify { subject }
+ end
+
+ it 'calls DesignManagement::CopyDesignCollection::CopyService' do
+ expect_next_instance_of(DesignManagement::CopyDesignCollection::CopyService) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end
+
+ subject
+ end
+
+ it 'logs if there was an error calling the service' do
+ message = 'Error message'
+
+ allow_next_instance_of(DesignManagement::CopyDesignCollection::CopyService) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.error(message: message))
+ end
+
+ expect(Gitlab::AppLogger).to receive(:warn).with(message)
+
+ subject
+ end
+ end
+end
diff --git a/spec/workers/design_management/new_version_worker_spec.rb b/spec/workers/design_management/new_version_worker_spec.rb
index 4d57c46487e..3320d7a062d 100644
--- a/spec/workers/design_management/new_version_worker_spec.rb
+++ b/spec/workers/design_management/new_version_worker_spec.rb
@@ -36,6 +36,10 @@ RSpec.describe DesignManagement::NewVersionWorker do
expect { worker.perform(version.id) }.to change { Note.system.count }.by(1)
end
+ it 'does not create a system note if skip_system_notes is true' do
+ expect { worker.perform(version.id, true) }.not_to change { Note.system.count }
+ end
+
it 'invokes GenerateImageVersionsService' do
expect_next_instance_of(DesignManagement::GenerateImageVersionsService) do |service|
expect(service).to receive(:execute)
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index 1be6e86b650..018971e288c 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -129,46 +129,36 @@ RSpec.describe GitGarbageCollectWorker do
let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
let(:lfs_object) { lfs_reference.lfs_object }
- context 'with cleanup_lfs_during_gc feature flag enabled' do
- before do
- stub_feature_flags(cleanup_lfs_during_gc: true)
+ it 'cleans up unreferenced LFS objects' do
+ expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
+ expect(svc.project).to eq(project)
+ expect(svc.dry_run).to be_falsy
+ expect(svc).to receive(:run!).and_call_original
end
- it 'cleans up unreferenced LFS objects' do
- expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
- expect(svc.project).to eq(project)
- expect(svc.dry_run).to be_falsy
- expect(svc).to receive(:run!).and_call_original
- end
-
- subject.perform(*params)
-
- expect(project.lfs_objects.reload).not_to include(lfs_object)
- end
+ subject.perform(*params)
- it 'does nothing if the database is read-only' do
- allow(Gitlab::Database).to receive(:read_only?) { true }
+ expect(project.lfs_objects.reload).not_to include(lfs_object)
+ end
- expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
+ it 'catches and logs exceptions' do
+ expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences)
+ .to receive(:run!)
+ .and_raise('Failed')
- subject.perform(*params)
+ expect(Gitlab::GitLogger).to receive(:warn)
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
- expect(project.lfs_objects.reload).to include(lfs_object)
- end
+ subject.perform(*params)
end
- context 'with cleanup_lfs_during_gc feature flag disabled' do
- before do
- stub_feature_flags(cleanup_lfs_during_gc: false)
- end
-
- it 'does not clean up unreferenced LFS objects' do
- expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
+ it 'does nothing if the database is read-only' do
+ allow(Gitlab::Database).to receive(:read_only?) { true }
+ expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
- subject.perform(*params)
+ subject.perform(*params)
- expect(project.lfs_objects.reload).to include(lfs_object)
- end
+ expect(project.lfs_objects.reload).to include(lfs_object)
end
end
end
diff --git a/spec/workers/group_import_worker_spec.rb b/spec/workers/group_import_worker_spec.rb
index fb2d49c21af..3fa24ecd7bc 100644
--- a/spec/workers/group_import_worker_spec.rb
+++ b/spec/workers/group_import_worker_spec.rb
@@ -3,12 +3,14 @@
require 'spec_helper'
RSpec.describe GroupImportWorker do
- let!(:user) { create(:user) }
- let!(:group) { create(:group) }
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
subject { described_class.new }
before do
+ create(:group_import_state, group: group, user: user)
+
allow_next_instance_of(described_class) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
@@ -26,44 +28,11 @@ RSpec.describe GroupImportWorker do
subject.perform(user.id, group.id)
end
- context 'when the import state does not exist' do
- it 'creates group import' do
- expect(group.import_state).to be_nil
-
- subject.perform(user.id, group.id)
- import_state = group.reload.import_state
-
- expect(import_state).to be_instance_of(GroupImportState)
- expect(import_state.status_name).to eq(:finished)
- expect(import_state.jid).not_to be_empty
- end
-
- it 'sets the group import status to started' do
- expect_next_instance_of(GroupImportState) do |import|
- expect(import).to receive(:start!).and_call_original
- end
-
- subject.perform(user.id, group.id)
- end
-
- it 'sets the group import status to finished' do
- expect_next_instance_of(GroupImportState) do |import|
- expect(import).to receive(:finish!).and_call_original
- end
+ it 'updates the existing state' do
+ expect { subject.perform(user.id, group.id) }
+ .not_to change { GroupImportState.count }
- subject.perform(user.id, group.id)
- end
- end
-
- context 'when the import state already exists' do
- it 'updates the existing state' do
- existing_state = create(:group_import_state, group: group)
-
- expect { subject.perform(user.id, group.id) }
- .not_to change { GroupImportState.count }
-
- expect(existing_state.reload).to be_finished
- end
+ expect(group.import_state.reload).to be_finished
end
end
@@ -83,11 +52,9 @@ RSpec.describe GroupImportWorker do
end
it 'sets the group import status to failed' do
- expect_next_instance_of(GroupImportState) do |import|
- expect(import).to receive(:fail_op).and_call_original
- end
-
expect { subject.perform(user.id, group.id) }.to raise_exception(Gitlab::ImportExport::Error)
+
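+ # -1 is assumed to be the GroupImportState state-machine value for the :failed status.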
+ expect(group.import_state.reload.status).to eq(-1)
end
end
end
diff --git a/spec/workers/incident_management/add_severity_system_note_worker_spec.rb b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
new file mode 100644
index 00000000000..203c62ffe6f
--- /dev/null
+++ b/spec/workers/incident_management/add_severity_system_note_worker_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::AddSeveritySystemNoteWorker do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:incident) { create(:incident, project: project) }
+ let_it_be(:issuable_severity) { create(:issuable_severity, issue: incident, severity: :medium) }
+
+ describe '#perform' do
+ let(:incident_id) { incident.id }
+ let(:user_id) { user.id }
+
+ subject(:perform) { described_class.new.perform(incident_id, user_id) }
+
+ shared_examples 'does not add a system note' do
+ it 'does not change incident notes count' do
+ expect { perform }.not_to change { incident.notes.count }
+ end
+ end
+
+ context 'when incident and user exist' do
+ it 'creates a system note' do
+ expect { perform }.to change { incident.notes.where(author: user).count }.by(1)
+ end
+ end
+
+ context 'when incident does not exist' do
+ let(:incident_id) { -1 }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when incident_id is nil' do
+ let(:incident_id) { nil }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when issue is not an incident' do
+ let_it_be(:issue) { create(:issue, project: project) }
+ let(:incident_id) { issue.id }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when user does not exist' do
+ let(:user_id) { -1 }
+
+ it_behaves_like 'does not add a system note'
+ end
+
+ context 'when user_id is nil' do
+ let(:user_id) { nil }
+
+ it_behaves_like 'does not add a system note'
+ end
+ end
+end
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
index 20ab283b49b..41d4f31da24 100644
--- a/spec/workers/incident_management/process_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
describe '#perform' do
let_it_be(:started_at) { Time.now.rfc3339 }
let_it_be(:payload) { { 'title' => 'title', 'start_time' => started_at } }
- let_it_be(:parsed_payload) { Gitlab::Alerting::NotificationPayloadParser.call(payload, project) }
let_it_be(:alert) { create(:alert_management_alert, project: project, payload: payload, started_at: started_at) }
let(:created_issue) { Issue.last! }
@@ -68,7 +67,6 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
context 'prometheus alert' do
let_it_be(:alert) { create(:alert_management_alert, :prometheus, project: project, started_at: started_at) }
- let_it_be(:parsed_payload) { alert.payload }
it_behaves_like 'creates issue successfully'
end
diff --git a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
index c294892a66f..2ca4193aa72 100644
--- a/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_prometheus_alert_worker_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe IncidentManagement::ProcessPrometheusAlertWorker do
describe '#perform' do
let_it_be(:project) { create(:project) }
let_it_be(:prometheus_alert) { create(:prometheus_alert, project: project) }
- let(:payload_key) { Gitlab::Alerting::Alert.new(project: project, payload: alert_params).gitlab_fingerprint }
+ let(:payload_key) { Gitlab::AlertManagement::Payload::Prometheus.new(project: project, payload: alert_params).gitlab_fingerprint }
let!(:prometheus_alert_event) { create(:prometheus_alert_event, prometheus_alert: prometheus_alert, payload_key: payload_key) }
let!(:settings) { create(:project_incident_management_setting, project: project, create_issue: true) }
diff --git a/spec/workers/member_invitation_reminder_emails_worker_spec.rb b/spec/workers/member_invitation_reminder_emails_worker_spec.rb
new file mode 100644
index 00000000000..64d096f84f5
--- /dev/null
+++ b/spec/workers/member_invitation_reminder_emails_worker_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberInvitationReminderEmailsWorker do
+ describe '#perform' do
+ subject { described_class.new.perform }
+
+ context 'when the invitation_reminders experiment is disabled' do
+ before do
+ stub_experiment(invitation_reminders: false)
+ end
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when the invitation_reminders experiment is enabled' do
+ before do
+ stub_experiment(invitation_reminders: true)
+ end
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
new file mode 100644
index 00000000000..19b79835825
--- /dev/null
+++ b/spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Metrics::Dashboard::SyncDashboardsWorker do
+ include MetricsDashboardHelpers
+ subject(:worker) { described_class.new }
+
+ let(:project) { project_with_dashboard(dashboard_path) }
+ let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
+
+ describe ".perform" do
+ it 'imports metrics' do
+ expect { worker.perform(project.id) }.to change(PrometheusMetric, :count).by(3)
+ end
+
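+ # The dashboard created by project_with_dashboard is assumed to define three metrics, so repeated runs should not add duplicates.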
+ it 'is idempotent' do
+ 2.times do
+ worker.perform(project.id)
+ end
+
+ expect(PrometheusMetric.count).to eq(3)
+ end
+ end
+end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 50d164d1705..cb626b08456 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -290,7 +290,7 @@ RSpec.describe PostReceive do
# MySQL drops milliseconds in the timestamps, so advance at least
# a second to ensure we see changes.
- Timecop.freeze(1.second.from_now) do
+ travel_to(1.second.from_now) do
expect do
perform
project.reload
diff --git a/spec/workers/propagate_integration_group_worker_spec.rb b/spec/workers/propagate_integration_group_worker_spec.rb
new file mode 100644
index 00000000000..b3c9255db57
--- /dev/null
+++ b/spec/workers/propagate_integration_group_worker_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PropagateIntegrationGroupWorker do
+ describe '#perform' do
+ let_it_be(:group1) { create(:group) }
+ let_it_be(:group2) { create(:group) }
+ let_it_be(:integration) { create(:redmine_service, :instance) }
+
+ before do
+ allow(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([group1, group2]), 'group')
+ .and_return(double(execute: nil))
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [integration.id, group1.id, group2.id] }
+
+ it 'calls BulkCreateIntegrationService' do
+ expect(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([group1, group2]), 'group')
+ .and_return(double(execute: nil))
+
+ subject
+ end
+ end
+
+ context 'with an invalid integration id' do
+ it 'returns without failure' do
+ expect(BulkCreateIntegrationService).not_to receive(:new)
+
+ subject.perform(0, group1.id, group2.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/propagate_integration_inherit_worker_spec.rb b/spec/workers/propagate_integration_inherit_worker_spec.rb
new file mode 100644
index 00000000000..88946b9926e
--- /dev/null
+++ b/spec/workers/propagate_integration_inherit_worker_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PropagateIntegrationInheritWorker do
+ describe '#perform' do
+ let_it_be(:integration) { create(:redmine_service, :instance) }
+ let_it_be(:integration1) { create(:redmine_service, inherit_from_id: integration.id) }
+ let_it_be(:integration2) { create(:bugzilla_service, inherit_from_id: integration.id) }
+ let_it_be(:integration3) { create(:redmine_service) }
+
+ before do
+ allow(BulkUpdateIntegrationService).to receive(:new)
+ .with(integration, match_array(integration1))
+ .and_return(double(execute: nil))
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [integration.id, integration1.id, integration3.id] }
+
+ it 'calls BulkUpdateIntegrationService' do
+ expect(BulkUpdateIntegrationService).to receive(:new)
+ .with(integration, match_array(integration1))
+ .and_return(double(execute: nil))
+
+ subject
+ end
+ end
+
+ context 'with an invalid integration id' do
+ it 'returns without failure' do
+ expect(BulkUpdateIntegrationService).not_to receive(:new)
+
+ subject.perform(0, integration1.id, integration3.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/propagate_integration_project_worker_spec.rb b/spec/workers/propagate_integration_project_worker_spec.rb
new file mode 100644
index 00000000000..3742ce5fde8
--- /dev/null
+++ b/spec/workers/propagate_integration_project_worker_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PropagateIntegrationProjectWorker do
+ describe '#perform' do
+ let_it_be(:project1) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:integration) { create(:redmine_service, :instance) }
+
+ before do
+ allow(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([project1, project2]), 'project')
+ .and_return(double(execute: nil))
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [integration.id, project1.id, project2.id] }
+
+ it 'calls BulkCreateIntegrationService' do
+ expect(BulkCreateIntegrationService).to receive(:new)
+ .with(integration, match_array([project1, project2]), 'project')
+ .and_return(double(execute: nil))
+
+ subject
+ end
+ end
+
+ context 'with an invalid integration id' do
+ it 'returns without failure' do
+ expect(BulkCreateIntegrationService).not_to receive(:new)
+
+ subject.perform(0, project1.id, project2.id)
+ end
+ end
+ end
+end