summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-08-20 18:42:06 +0000
committerGitLab Bot <gitlab-bot@gitlab.com>2020-08-20 18:42:06 +0000
commit6e4e1050d9dba2b7b2523fdd1768823ab85feef4 (patch)
tree78be5963ec075d80116a932011d695dd33910b4e /spec
parent1ce776de4ae122aba3f349c02c17cebeaa8ecf07 (diff)
downloadgitlab-ce-6e4e1050d9dba2b7b2523fdd1768823ab85feef4.tar.gz
Add latest changes from gitlab-org/gitlab@13-3-stable-ee
Diffstat (limited to 'spec')
-rw-r--r--spec/bin/feature_flag_spec.rb91
-rw-r--r--spec/config/mail_room_spec.rb2
-rw-r--r--spec/config/object_store_settings_spec.rb1
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb31
-rw-r--r--spec/controllers/admin/integrations_controller_spec.rb10
-rw-r--r--spec/controllers/application_controller_spec.rb16
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb8
-rw-r--r--spec/controllers/concerns/checks_collaboration_spec.rb2
-rw-r--r--spec/controllers/concerns/graceful_timeout_handling_spec.rb42
-rw-r--r--spec/controllers/concerns/metrics_dashboard_spec.rb5
-rw-r--r--spec/controllers/concerns/send_file_upload_spec.rb93
-rw-r--r--spec/controllers/dashboard/todos_controller_spec.rb9
-rw-r--r--spec/controllers/explore/projects_controller_spec.rb29
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb1
-rw-r--r--spec/controllers/groups/releases_controller_spec.rb75
-rw-r--r--spec/controllers/groups/settings/repository_controller_spec.rb3
-rw-r--r--spec/controllers/groups/shared_projects_controller_spec.rb6
-rw-r--r--spec/controllers/help_controller_spec.rb9
-rw-r--r--spec/controllers/import/available_namespaces_controller_spec.rb41
-rw-r--r--spec/controllers/import/bitbucket_server_controller_spec.rb6
-rw-r--r--spec/controllers/import/gitea_controller_spec.rb8
-rw-r--r--spec/controllers/import/github_controller_spec.rb98
-rw-r--r--spec/controllers/import/gitlab_controller_spec.rb1
-rw-r--r--spec/controllers/import/manifest_controller_spec.rb57
-rw-r--r--spec/controllers/invites_controller_spec.rb21
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb17
-rw-r--r--spec/controllers/projects/ci/daily_build_group_report_results_controller_spec.rb4
-rw-r--r--spec/controllers/projects/ci/lints_controller_spec.rb73
-rw-r--r--spec/controllers/projects/cycle_analytics/events_controller_spec.rb2
-rw-r--r--spec/controllers/projects/cycle_analytics_controller_spec.rb2
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb36
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb19
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb3
-rw-r--r--spec/controllers/projects/incidents_controller_spec.rb46
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb70
-rw-r--r--spec/controllers/projects/logs_controller_spec.rb26
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb4
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb1
-rw-r--r--spec/controllers/projects/pipelines/tests_controller_spec.rb32
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb224
-rw-r--r--spec/controllers/projects/product_analytics_controller_spec.rb95
-rw-r--r--spec/controllers/projects/prometheus/alerts_controller_spec.rb18
-rw-r--r--spec/controllers/projects/protected_branches_controller_spec.rb1
-rw-r--r--spec/controllers/projects/serverless/functions_controller_spec.rb1
-rw-r--r--spec/controllers/projects/services_controller_spec.rb2
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb4
-rw-r--r--spec/controllers/projects/settings/repository_controller_spec.rb3
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb27
-rw-r--r--spec/controllers/projects/tags_controller_spec.rb10
-rw-r--r--spec/controllers/projects/variables_controller_spec.rb1
-rw-r--r--spec/controllers/registrations/experience_levels_controller_spec.rb1
-rw-r--r--spec/controllers/registrations_controller_spec.rb155
-rw-r--r--spec/controllers/repositories/git_http_controller_spec.rb1
-rw-r--r--spec/controllers/repositories/lfs_storage_controller_spec.rb160
-rw-r--r--spec/controllers/root_controller_spec.rb24
-rw-r--r--spec/controllers/search_controller_spec.rb19
-rw-r--r--spec/controllers/sent_notifications_controller_spec.rb1
-rw-r--r--spec/controllers/sessions_controller_spec.rb39
-rw-r--r--spec/db/schema_spec.rb5
-rw-r--r--spec/factories/alert_management/alerts.rb2
-rw-r--r--spec/factories/audit_events.rb78
-rw-r--r--spec/factories/boards.rb2
-rw-r--r--spec/factories/ci/pipeline_artifacts.rb17
-rw-r--r--spec/factories/ci/pipelines.rb2
-rw-r--r--spec/factories/clusters/agent_tokens.rb9
-rw-r--r--spec/factories/clusters/agents.rb9
-rw-r--r--spec/factories/design_management/designs.rb8
-rw-r--r--spec/factories/design_management/versions.rb4
-rw-r--r--spec/factories/emails.rb2
-rw-r--r--spec/factories/experiments.rb7
-rw-r--r--spec/factories/group_deploy_keys_groups.rb9
-rw-r--r--spec/factories/issues.rb7
-rw-r--r--spec/factories/iterations.rb8
-rw-r--r--spec/factories/labels.rb2
-rw-r--r--spec/factories/merge_requests.rb17
-rw-r--r--spec/factories/namespace_settings.rb7
-rw-r--r--spec/factories/namespaces.rb4
-rw-r--r--spec/factories/packages.rb2
-rw-r--r--spec/factories/plans.rb2
-rw-r--r--spec/factories/project_hooks.rb1
-rw-r--r--spec/factories/project_repository_storage_moves.rb4
-rw-r--r--spec/factories/projects.rb8
-rw-r--r--spec/factories/prometheus_alert.rb4
-rw-r--r--spec/factories/protected_branches/merge_access_levels.rb8
-rw-r--r--spec/factories/protected_branches/push_access_levels.rb8
-rw-r--r--spec/factories/raw_usage_data.rb8
-rw-r--r--spec/factories/resource_iteration_event.rb11
-rw-r--r--spec/factories/services.rb8
-rw-r--r--spec/factories/usage_data.rb8
-rw-r--r--spec/factories/wiki_pages.rb3
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb1
-rw-r--r--spec/features/admin/admin_projects_spec.rb2
-rw-r--r--spec/features/admin/admin_settings_spec.rb3
-rw-r--r--spec/features/atom/users_spec.rb3
-rw-r--r--spec/features/boards/issue_ordering_spec.rb2
-rw-r--r--spec/features/calendar_spec.rb2
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb11
-rw-r--r--spec/features/clusters/installing_applications_shared_examples.rb60
-rw-r--r--spec/features/commits_spec.rb2
-rw-r--r--spec/features/explore/groups_spec.rb4
-rw-r--r--spec/features/global_search_spec.rb2
-rw-r--r--spec/features/groups/empty_states_spec.rb22
-rw-r--r--spec/features/groups/group_settings_spec.rb2
-rw-r--r--spec/features/groups/issues_spec.rb46
-rw-r--r--spec/features/groups/members/manage_groups_spec.rb16
-rw-r--r--spec/features/groups/members/manage_members_spec.rb6
-rw-r--r--spec/features/groups/members/master_manages_access_requests_spec.rb1
-rw-r--r--spec/features/groups/members/search_members_spec.rb2
-rw-r--r--spec/features/groups/members/tabs_spec.rb112
-rw-r--r--spec/features/groups/navbar_spec.rb11
-rw-r--r--spec/features/groups/packages_spec.rb80
-rw-r--r--spec/features/groups_spec.rb2
-rw-r--r--spec/features/import/manifest_import_spec.rb8
-rw-r--r--spec/features/invites_spec.rb18
-rw-r--r--spec/features/issuables/issuable_list_spec.rb4
-rw-r--r--spec/features/issues/bulk_assignment_labels_spec.rb3
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb23
-rw-r--r--spec/features/issues/filtered_search/recent_searches_spec.rb21
-rw-r--r--spec/features/issues/filtered_search/visual_tokens_spec.rb4
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb198
-rw-r--r--spec/features/issues/service_desk_spec.rb6
-rw-r--r--spec/features/issues/update_issues_spec.rb7
-rw-r--r--spec/features/issues/user_filters_issues_spec.rb4
-rw-r--r--spec/features/issues/user_sees_empty_state_spec.rb6
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb16
-rw-r--r--spec/features/issues/user_views_issues_spec.rb28
-rw-r--r--spec/features/markdown/copy_as_gfm_spec.rb2
-rw-r--r--spec/features/markdown/metrics_spec.rb12
-rw-r--r--spec/features/merge_request/user_approves_spec.rb2
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb1
-rw-r--r--spec/features/merge_request/user_customizes_merge_commit_message_spec.rb2
-rw-r--r--spec/features/merge_request/user_edits_merge_request_spec.rb23
-rw-r--r--spec/features/merge_request/user_jumps_to_discussion_spec.rb29
-rw-r--r--spec/features/merge_request/user_merges_immediately_spec.rb1
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb6
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb30
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb2
-rw-r--r--spec/features/merge_request/user_resolves_wip_mr_spec.rb7
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb1
-rw-r--r--spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_closing_issues_message_spec.rb1
-rw-r--r--spec/features/merge_request/user_sees_discussions_spec.rb5
-rw-r--r--spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb51
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb20
-rw-r--r--spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb1
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb25
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb9
-rw-r--r--spec/features/merge_request/user_sees_wip_help_message_spec.rb18
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb1
-rw-r--r--spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb1
-rw-r--r--spec/features/merge_request/user_views_auto_expanding_diff_spec.rb35
-rw-r--r--spec/features/merge_request/user_views_diffs_file_by_file_spec.rb1
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb1
-rw-r--r--spec/features/merge_requests/user_mass_updates_spec.rb11
-rw-r--r--spec/features/merge_requests/user_views_diffs_commit_spec.rb20
-rw-r--r--spec/features/populate_new_pipeline_vars_with_params_spec.rb1
-rw-r--r--spec/features/profiles/chat_names_spec.rb1
-rw-r--r--spec/features/profiles/password_spec.rb2
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb7
-rw-r--r--spec/features/profiles/user_edit_preferences_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb4
-rw-r--r--spec/features/projects/badges/coverage_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_line_permalink_updater_spec.rb4
-rw-r--r--spec/features/projects/ci/lint_spec.rb50
-rw-r--r--spec/features/projects/classification_label_on_project_pages_spec.rb1
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb3
-rw-r--r--spec/features/projects/clusters/user_spec.rb1
-rw-r--r--spec/features/projects/commit/diff_notes_spec.rb39
-rw-r--r--spec/features/projects/commit/mini_pipeline_graph_spec.rb3
-rw-r--r--spec/features/projects/commit/user_views_user_status_on_commit_spec.rb2
-rw-r--r--spec/features/projects/commits/user_browses_commits_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_metrics_spec.rb5
-rw-r--r--spec/features/projects/environments/environments_spec.rb2
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb1
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb1
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb1
-rw-r--r--spec/features/projects/files/user_deletes_files_spec.rb1
-rw-r--r--spec/features/projects/files/user_replaces_files_spec.rb1
-rw-r--r--spec/features/projects/fork_spec.rb30
-rw-r--r--spec/features/projects/import_export/export_file_spec.rb1
-rw-r--r--spec/features/projects/issues/design_management/user_uploads_designs_spec.rb28
-rw-r--r--spec/features/projects/issues/viewing_issues_with_external_authorization_enabled_spec.rb6
-rw-r--r--spec/features/projects/jobs/user_browses_job_spec.rb2
-rw-r--r--spec/features/projects/jobs_spec.rb28
-rw-r--r--spec/features/projects/members/list_spec.rb2
-rw-r--r--spec/features/projects/members/master_manages_access_requests_spec.rb1
-rw-r--r--spec/features/projects/navbar_spec.rb13
-rw-r--r--spec/features/projects/package_files_spec.rb55
-rw-r--r--spec/features/projects/packages_spec.rb67
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb72
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb2
-rw-r--r--spec/features/projects/product_analytics/events_spec.rb29
-rw-r--r--spec/features/projects/product_analytics/graphs_spec.rb25
-rw-r--r--spec/features/projects/product_analytics/setup_spec.rb19
-rw-r--r--spec/features/projects/product_analytics/test_spec.rb27
-rw-r--r--spec/features/projects/settings/operations_settings_spec.rb5
-rw-r--r--spec/features/projects/settings/packages_settings_spec.rb38
-rw-r--r--spec/features/projects/show/user_manages_notifications_spec.rb21
-rw-r--r--spec/features/projects/snippets/create_snippet_spec.rb169
-rw-r--r--spec/features/projects/snippets/show_spec.rb156
-rw-r--r--spec/features/projects/wiki/user_updates_wiki_page_spec.rb26
-rw-r--r--spec/features/projects/wiki/user_views_wiki_pages_spec.rb2
-rw-r--r--spec/features/projects_spec.rb10
-rw-r--r--spec/features/registrations/experience_level_spec.rb45
-rw-r--r--spec/features/runners_spec.rb14
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb76
-rw-r--r--spec/features/security/project/internal_access_spec.rb12
-rw-r--r--spec/features/security/project/public_access_spec.rb24
-rw-r--r--spec/features/security/project/snippet/internal_access_spec.rb7
-rw-r--r--spec/features/snippets/embedded_snippet_spec.rb22
-rw-r--r--spec/features/snippets/notes_on_personal_snippets_spec.rb11
-rw-r--r--spec/features/snippets/show_spec.rb167
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb91
-rw-r--r--spec/features/snippets/user_edits_snippet_spec.rb8
-rw-r--r--spec/features/users/signup_spec.rb25
-rw-r--r--spec/finders/admin/projects_finder_spec.rb4
-rw-r--r--spec/finders/alert_management/alerts_finder_spec.rb42
-rw-r--r--spec/finders/autocomplete/move_to_project_finder_spec.rb10
-rw-r--r--spec/finders/ci/daily_build_group_report_results_finder_spec.rb2
-rw-r--r--spec/finders/ci/pipelines_for_merge_request_finder_spec.rb2
-rw-r--r--spec/finders/design_management/designs_finder_spec.rb30
-rw-r--r--spec/finders/fork_targets_finder_spec.rb4
-rw-r--r--spec/finders/group_descendants_finder_spec.rb2
-rw-r--r--spec/finders/group_projects_finder_spec.rb6
-rw-r--r--spec/finders/issues_finder_spec.rb58
-rw-r--r--spec/finders/joined_groups_finder_spec.rb2
-rw-r--r--spec/finders/members_finder_spec.rb25
-rw-r--r--spec/finders/merge_requests_finder_spec.rb102
-rw-r--r--spec/finders/milestones_finder_spec.rb8
-rw-r--r--spec/finders/personal_access_tokens_finder_spec.rb42
-rw-r--r--spec/finders/personal_projects_finder_spec.rb2
-rw-r--r--spec/finders/projects_finder_spec.rb8
-rw-r--r--spec/finders/releases_finder_spec.rb195
-rw-r--r--spec/finders/template_finder_spec.rb4
-rw-r--r--spec/finders/todos_finder_spec.rb54
-rw-r--r--spec/finders/uploader_finder_spec.rb4
-rw-r--r--spec/fixtures/api/schemas/environment.json1
-rw-r--r--spec/fixtures/api/schemas/job/artifact.json3
-rw-r--r--spec/fixtures/api/schemas/release/link.json1
-rw-r--r--spec/fixtures/gitlab/database/structure_example.sql16
-rw-r--r--spec/fixtures/gitlab/database/structure_example_cleaned.sql16
-rw-r--r--spec/fixtures/helm/helm_list_v2_empty_blob.json.gzbin0 -> 81 bytes
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml33
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_is_an_array.yml15
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml32
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml15
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panle_groups_wrong_content_type.yml33
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml67
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/invalid_dashboard.yml67
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/sample_dashboard.yml35
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json4
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json1
-rw-r--r--spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json1
-rw-r--r--spec/fixtures/pipeline_artifacts/code_coverage.json9
-rw-r--r--spec/fixtures/potential_html.po28
-rw-r--r--spec/fixtures/product_analytics/event.json7
-rw-r--r--spec/fixtures/valid.po3
-rw-r--r--spec/frontend/__mocks__/@toast-ui/vue-editor/index.js11
-rw-r--r--spec/frontend/__mocks__/monaco-editor/index.js4
-rw-r--r--spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap50
-rw-r--r--spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js174
-rw-r--r--spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js51
-rw-r--r--spec/frontend/add_context_commits_modal/store/actions_spec.js239
-rw-r--r--spec/frontend/add_context_commits_modal/store/mutations_spec.js156
-rw-r--r--spec/frontend/alert_management/components/alert_details_spec.js (renamed from spec/frontend/alert_management/components/alert_management_detail_spec.js)59
-rw-r--r--spec/frontend/alert_management/components/alert_management_empty_state_spec.js1
-rw-r--r--spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js1
-rw-r--r--spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js57
-rw-r--r--spec/frontend/alert_management/components/alert_management_table_spec.js96
-rw-r--r--spec/frontend/alert_management/components/alert_metrics_spec.js2
-rw-r--r--spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js4
-rw-r--r--spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js6
-rw-r--r--spec/frontend/alert_management/mocks/alerts.json9
-rw-r--r--spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap16
-rw-r--r--spec/frontend/alert_settings/alert_settings_form_spec.js121
-rw-r--r--spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js2
-rw-r--r--spec/frontend/analytics/components/activity_chart_spec.js39
-rw-r--r--spec/frontend/api_spec.js441
-rw-r--r--spec/frontend/awards_handler_spec.js4
-rw-r--r--spec/frontend/badges/components/badge_form_spec.js6
-rw-r--r--spec/frontend/batch_comments/components/draft_note_spec.js52
-rw-r--r--spec/frontend/batch_comments/components/drafts_count_spec.js4
-rw-r--r--spec/frontend/batch_comments/components/preview_item_spec.js2
-rw-r--r--spec/frontend/batch_comments/components/publish_button_spec.js2
-rw-r--r--spec/frontend/batch_comments/components/publish_dropdown_spec.js2
-rw-r--r--spec/frontend/batch_comments/mock_data.js3
-rw-r--r--spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js2
-rw-r--r--spec/frontend/behaviors/copy_as_gfm_spec.js10
-rw-r--r--spec/frontend/behaviors/gl_emoji_spec.js2
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap10
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap21
-rw-r--r--spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap2
-rw-r--r--spec/frontend/blob/components/blob_content_error_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_content_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_edit_content_spec.js46
-rw-r--r--spec/frontend/blob/components/blob_edit_header_spec.js44
-rw-r--r--spec/frontend/blob/components/blob_embeddable_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_header_default_actions_spec.js6
-rw-r--r--spec/frontend/blob/components/blob_header_viewer_switcher_spec.js2
-rw-r--r--spec/frontend/blob/components/mock_data.js2
-rw-r--r--spec/frontend/blob/notebook/notebook_viever_spec.js2
-rw-r--r--spec/frontend/blob/pipeline_tour_success_modal_spec.js9
-rw-r--r--spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js6
-rw-r--r--spec/frontend/blob/utils_spec.js51
-rw-r--r--spec/frontend/blob/viewer/index_spec.js4
-rw-r--r--spec/frontend/blob_edit/blob_bundle_spec.js2
-rw-r--r--spec/frontend/boards/board_card_spec.js2
-rw-r--r--spec/frontend/boards/components/board_column_spec.js5
-rw-r--r--spec/frontend/boards/components/board_form_spec.js2
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js5
-rw-r--r--spec/frontend/boards/components/board_settings_sidebar_spec.js159
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js4
-rw-r--r--spec/frontend/boards/components/sidebar/remove_issue_spec.js28
-rw-r--r--spec/frontend/boards/issue_card_spec.js2
-rw-r--r--spec/frontend/boards/issue_spec.js25
-rw-r--r--spec/frontend/boards/list_spec.js2
-rw-r--r--spec/frontend/boards/mock_data.js23
-rw-r--r--spec/frontend/boards/stores/actions_spec.js32
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js47
-rw-r--r--spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap2
-rw-r--r--spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js8
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js26
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js4
-rw-r--r--spec/frontend/ci_variable_list/store/actions_spec.js4
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js72
-rw-r--r--spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap16
-rw-r--r--spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap4
-rw-r--r--spec/frontend/clusters/components/application_row_spec.js15
-rw-r--r--spec/frontend/clusters/components/applications_spec.js66
-rw-r--r--spec/frontend/clusters/components/fluentd_output_settings_spec.js4
-rw-r--r--spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js4
-rw-r--r--spec/frontend/clusters/components/knative_domain_editor_spec.js4
-rw-r--r--spec/frontend/clusters/forms/components/integration_form_spec.js112
-rw-r--r--spec/frontend/clusters/services/application_state_machine_spec.js24
-rw-r--r--spec/frontend/clusters/services/crossplane_provider_stack_spec.js4
-rw-r--r--spec/frontend/clusters/services/mock_data.js10
-rw-r--r--spec/frontend/clusters/stores/clusters_store_spec.js23
-rw-r--r--spec/frontend/clusters_list/components/ancestor_notice_spec.js4
-rw-r--r--spec/frontend/clusters_list/components/clusters_spec.js8
-rw-r--r--spec/frontend/clusters_list/store/actions_spec.js8
-rw-r--r--spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap2
-rw-r--r--spec/frontend/collapsed_sidebar_todo_spec.js2
-rw-r--r--spec/frontend/commit/commit_pipeline_status_component_spec.js2
-rw-r--r--spec/frontend/commit/pipelines/pipelines_spec.js75
-rw-r--r--spec/frontend/confidential_merge_request/components/dropdown_spec.js4
-rw-r--r--spec/frontend/confirm_modal_spec.js2
-rw-r--r--spec/frontend/contributors/store/actions_spec.js2
-rw-r--r--spec/frontend/create_cluster/eks_cluster/store/actions_spec.js2
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js92
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js42
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js70
-rw-r--r--spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js98
-rw-r--r--spec/frontend/deploy_freeze/store/actions_spec.js123
-rw-r--r--spec/frontend/deploy_freeze/store/mutations_spec.js72
-rw-r--r--spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap14
-rw-r--r--spec/frontend/design_management/components/delete_button_spec.js4
-rw-r--r--spec/frontend/design_management/components/design_notes/design_discussion_spec.js4
-rw-r--r--spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap331
-rw-r--r--spec/frontend/design_management/components/list/item_spec.js51
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap (renamed from spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_spec.js.snap)24
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap76
-rw-r--r--spec/frontend/design_management/components/toolbar/design_navigation_spec.js (renamed from spec/frontend/design_management_new/components/toolbar/pagination_spec.js)8
-rw-r--r--spec/frontend/design_management/components/toolbar/index_spec.js4
-rw-r--r--spec/frontend/design_management/components/toolbar/pagination_button_spec.js61
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap30
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap158
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap162
-rw-r--r--spec/frontend/design_management/components/upload/design_dropzone_spec.js25
-rw-r--r--spec/frontend/design_management/components/upload/design_version_dropdown_spec.js16
-rw-r--r--spec/frontend/design_management/components/upload/mock_data/all_versions.js12
-rw-r--r--spec/frontend/design_management/mock_data/all_versions.js6
-rw-r--r--spec/frontend/design_management/mock_data/apollo_mock.js106
-rw-r--r--spec/frontend/design_management/mock_data/design.js12
-rw-r--r--spec/frontend/design_management/mock_data/designs.js6
-rw-r--r--spec/frontend/design_management/mock_data/no_designs.js2
-rw-r--r--spec/frontend/design_management/mock_data/versions_list.js0
-rw-r--r--spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap125
-rw-r--r--spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap16
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js9
-rw-r--r--spec/frontend/design_management/pages/index_apollo_spec.js162
-rw-r--r--spec/frontend/design_management/pages/index_spec.js196
-rw-r--r--spec/frontend/design_management/router_spec.js16
-rw-r--r--spec/frontend/design_management/utils/cache_update_spec.js2
-rw-r--r--spec/frontend/design_management/utils/design_management_utils_spec.js7
-rw-r--r--spec/frontend/design_management/utils/error_messages_spec.js6
-rw-r--r--spec/frontend/design_management_legacy/components/__snapshots__/design_note_pin_spec.js.snap (renamed from spec/frontend/design_management_new/components/__snapshots__/design_note_pin_spec.js.snap)14
-rw-r--r--spec/frontend/design_management_legacy/components/__snapshots__/design_presentation_spec.js.snap (renamed from spec/frontend/design_management_new/components/__snapshots__/design_presentation_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/__snapshots__/design_scaler_spec.js.snap (renamed from spec/frontend/design_management_new/components/__snapshots__/design_scaler_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/__snapshots__/image_spec.js.snap (renamed from spec/frontend/design_management_new/components/__snapshots__/image_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/delete_button_spec.js (renamed from spec/frontend/design_management_new/components/delete_button_spec.js)6
-rw-r--r--spec/frontend/design_management_legacy/components/design_note_pin_spec.js (renamed from spec/frontend/design_management_new/components/design_note_pin_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/components/design_notes/__snapshots__/design_note_spec.js.snap (renamed from spec/frontend/design_management_new/components/design_notes/__snapshots__/design_note_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/design_notes/__snapshots__/design_reply_form_spec.js.snap (renamed from spec/frontend/design_management_new/components/design_notes/__snapshots__/design_reply_form_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/design_notes/design_discussion_spec.js (renamed from spec/frontend/design_management_new/components/design_notes/design_discussion_spec.js)16
-rw-r--r--spec/frontend/design_management_legacy/components/design_notes/design_note_spec.js (renamed from spec/frontend/design_management_new/components/design_notes/design_note_spec.js)4
-rw-r--r--spec/frontend/design_management_legacy/components/design_notes/design_reply_form_spec.js (renamed from spec/frontend/design_management_new/components/design_notes/design_reply_form_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/components/design_notes/toggle_replies_widget_spec.js (renamed from spec/frontend/design_management_new/components/design_notes/toggle_replies_widget_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/components/design_overlay_spec.js (renamed from spec/frontend/design_management_new/components/design_overlay_spec.js)6
-rw-r--r--spec/frontend/design_management_legacy/components/design_presentation_spec.js (renamed from spec/frontend/design_management_new/components/design_presentation_spec.js)4
-rw-r--r--spec/frontend/design_management_legacy/components/design_scaler_spec.js (renamed from spec/frontend/design_management_new/components/design_scaler_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/components/design_sidebar_spec.js (renamed from spec/frontend/design_management_new/components/design_sidebar_spec.js)6
-rw-r--r--spec/frontend/design_management_legacy/components/image_spec.js (renamed from spec/frontend/design_management_new/components/image_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/components/list/__snapshots__/item_spec.js.snap149
-rw-r--r--spec/frontend/design_management_legacy/components/list/item_spec.js (renamed from spec/frontend/design_management_new/components/list/item_spec.js)53
-rw-r--r--spec/frontend/design_management_legacy/components/toolbar/__snapshots__/index_spec.js.snap (renamed from spec/frontend/design_management_new/components/toolbar/__snapshots__/index_spec.js.snap)2
-rw-r--r--spec/frontend/design_management_legacy/components/toolbar/__snapshots__/pagination_button_spec.js.snap (renamed from spec/frontend/design_management/components/toolbar/__snapshots__/pagination_button_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/toolbar/__snapshots__/pagination_spec.js.snap (renamed from spec/frontend/design_management/components/toolbar/__snapshots__/pagination_spec.js.snap)0
-rw-r--r--spec/frontend/design_management_legacy/components/toolbar/index_spec.js (renamed from spec/frontend/design_management_new/components/toolbar/index_spec.js)6
-rw-r--r--spec/frontend/design_management_legacy/components/toolbar/pagination_button_spec.js (renamed from spec/frontend/design_management_new/components/toolbar/pagination_button_spec.js)4
-rw-r--r--spec/frontend/design_management_legacy/components/toolbar/pagination_spec.js (renamed from spec/frontend/design_management/components/toolbar/pagination_spec.js)4
-rw-r--r--spec/frontend/design_management_legacy/components/upload/__snapshots__/button_spec.js.snap (renamed from spec/frontend/design_management_new/components/upload/__snapshots__/button_spec.js.snap)24
-rw-r--r--spec/frontend/design_management_legacy/components/upload/__snapshots__/design_dropzone_spec.js.snap (renamed from spec/frontend/design_management_new/components/upload/__snapshots__/design_dropzone_spec.js.snap)158
-rw-r--r--spec/frontend/design_management_legacy/components/upload/__snapshots__/design_version_dropdown_spec.js.snap (renamed from spec/frontend/design_management_new/components/upload/__snapshots__/design_version_dropdown_spec.js.snap)62
-rw-r--r--spec/frontend/design_management_legacy/components/upload/button_spec.js (renamed from spec/frontend/design_management_new/components/upload/button_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/components/upload/design_dropzone_spec.js (renamed from spec/frontend/design_management_new/components/upload/design_dropzone_spec.js)25
-rw-r--r--spec/frontend/design_management_legacy/components/upload/design_version_dropdown_spec.js (renamed from spec/frontend/design_management_new/components/upload/design_version_dropdown_spec.js)22
-rw-r--r--spec/frontend/design_management_legacy/components/upload/mock_data/all_versions.js (renamed from spec/frontend/design_management_new/components/upload/mock_data/all_versions.js)0
-rw-r--r--spec/frontend/design_management_legacy/mock_data/all_versions.js (renamed from spec/frontend/design_management_new/mock_data/all_versions.js)0
-rw-r--r--spec/frontend/design_management_legacy/mock_data/design.js (renamed from spec/frontend/design_management_new/mock_data/design.js)0
-rw-r--r--spec/frontend/design_management_legacy/mock_data/designs.js (renamed from spec/frontend/design_management_new/mock_data/designs.js)0
-rw-r--r--spec/frontend/design_management_legacy/mock_data/no_designs.js (renamed from spec/frontend/design_management_new/mock_data/no_designs.js)0
-rw-r--r--spec/frontend/design_management_legacy/mock_data/notes.js (renamed from spec/frontend/design_management_new/mock_data/notes.js)0
-rw-r--r--spec/frontend/design_management_legacy/pages/__snapshots__/index_spec.js.snap (renamed from spec/frontend/design_management_new/pages/__snapshots__/index_spec.js.snap)120
-rw-r--r--spec/frontend/design_management_legacy/pages/design/__snapshots__/index_spec.js.snap (renamed from spec/frontend/design_management_new/pages/design/__snapshots__/index_spec.js.snap)16
-rw-r--r--spec/frontend/design_management_legacy/pages/design/index_spec.js (renamed from spec/frontend/design_management_new/pages/design/index_spec.js)27
-rw-r--r--spec/frontend/design_management_legacy/pages/index_spec.js (renamed from spec/frontend/design_management_new/pages/index_spec.js)116
-rw-r--r--spec/frontend/design_management_legacy/router_spec.js (renamed from spec/frontend/design_management_new/router_spec.js)24
-rw-r--r--spec/frontend/design_management_legacy/utils/cache_update_spec.js (renamed from spec/frontend/design_management_new/utils/cache_update_spec.js)6
-rw-r--r--spec/frontend/design_management_legacy/utils/design_management_utils_spec.js (renamed from spec/frontend/design_management_new/utils/design_management_utils_spec.js)2
-rw-r--r--spec/frontend/design_management_legacy/utils/error_messages_spec.js (renamed from spec/frontend/design_management_new/utils/error_messages_spec.js)4
-rw-r--r--spec/frontend/design_management_legacy/utils/tracking_spec.js (renamed from spec/frontend/design_management_new/utils/tracking_spec.js)2
-rw-r--r--spec/frontend/design_management_new/components/list/__snapshots__/item_spec.js.snap472
-rw-r--r--spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_button_spec.js.snap28
-rw-r--r--spec/frontend/diffs/components/app_spec.js1
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_expansion_cell_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js22
-rw-r--r--spec/frontend/diffs/components/diff_stats_spec.js104
-rw-r--r--spec/frontend/diffs/components/diff_table_cell_spec.js56
-rw-r--r--spec/frontend/diffs/components/inline_diff_view_spec.js2
-rw-r--r--spec/frontend/diffs/components/no_changes_spec.js2
-rw-r--r--spec/frontend/diffs/components/parallel_diff_view_spec.js2
-rw-r--r--spec/frontend/diffs/components/tree_list_spec.js1
-rw-r--r--spec/frontend/diffs/diff_file_spec.js60
-rw-r--r--spec/frontend/diffs/store/actions_spec.js27
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js2
-rw-r--r--spec/frontend/diffs/store/utils_spec.js55
-rw-r--r--spec/frontend/dropzone_input_spec.js2
-rw-r--r--spec/frontend/editor/editor_lite_spec.js28
-rw-r--r--spec/frontend/editor/editor_markdown_ext_spec.js2
-rw-r--r--spec/frontend/emoji/emoji_spec.js2
-rw-r--r--spec/frontend/emoji/support/unicode_support_map_spec.js2
-rw-r--r--spec/frontend/environment.js18
-rw-r--r--spec/frontend/environments/environment_actions_spec.js2
-rw-r--r--spec/frontend/environments/environment_external_url_spec.js4
-rw-r--r--spec/frontend/environments/environment_stop_spec.js4
-rw-r--r--spec/frontend/environments/environments_app_spec.js2
-rw-r--r--spec/frontend/environments/folder/environments_folder_view_spec.js6
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js4
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_actions_spec.js6
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js28
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_entry_spec.js2
-rw-r--r--spec/frontend/error_tracking/store/actions_spec.js2
-rw-r--r--spec/frontend/error_tracking/store/details/actions_spec.js2
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js4
-rw-r--r--spec/frontend/error_tracking_settings/components/project_dropdown_spec.js14
-rw-r--r--spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js8
-rw-r--r--spec/frontend/filtered_search/filtered_search_manager_spec.js46
-rw-r--r--spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js4
-rw-r--r--spec/frontend/filtered_search/services/recent_searches_service_spec.js2
-rw-r--r--spec/frontend/filtered_search/visual_token_value_spec.js2
-rw-r--r--spec/frontend/fixtures/api_merge_requests.rb24
-rw-r--r--spec/frontend/fixtures/api_projects.rb35
-rw-r--r--spec/frontend/fixtures/freeze_period.rb40
-rw-r--r--spec/frontend/fixtures/merge_requests.rb1
-rw-r--r--spec/frontend/fixtures/metrics_dashboard.rb2
-rw-r--r--spec/frontend/fixtures/projects_json.rb47
-rw-r--r--spec/frontend/fixtures/test_report.rb1
-rw-r--r--spec/frontend/flash_spec.js129
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js4
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap30
-rw-r--r--spec/frontend/grafana_integration/components/grafana_integration_spec.js11
-rw-r--r--spec/frontend/groups/components/app_spec.js2
-rw-r--r--spec/frontend/header_spec.js2
-rw-r--r--spec/frontend/helpers/backoff_helper.js33
-rw-r--r--spec/frontend/helpers/dom_events_helper.js3
-rw-r--r--spec/frontend/helpers/dom_shims/index.js2
-rw-r--r--spec/frontend/helpers/dom_shims/mutation_observer.js7
-rw-r--r--spec/frontend/helpers/dom_shims/range.js13
-rw-r--r--spec/frontend/helpers/filtered_search_spec_helper.js2
-rw-r--r--spec/frontend/helpers/init_vue_mr_page_helper.js1
-rw-r--r--spec/frontend/helpers/monitor_helper_spec.js58
-rw-r--r--spec/frontend/ide/components/activity_bar_spec.js8
-rw-r--r--spec/frontend/ide/components/commit_sidebar/empty_state_spec.js8
-rw-r--r--spec/frontend/ide/components/commit_sidebar/form_spec.js9
-rw-r--r--spec/frontend/ide/components/commit_sidebar/list_collapsed_spec.js5
-rw-r--r--spec/frontend/ide/components/commit_sidebar/list_spec.js9
-rw-r--r--spec/frontend/ide/components/commit_sidebar/radio_group_spec.js8
-rw-r--r--spec/frontend/ide/components/commit_sidebar/success_message_spec.js8
-rw-r--r--spec/frontend/ide/components/file_row_extra_spec.js3
-rw-r--r--spec/frontend/ide/components/file_templates/bar_spec.js3
-rw-r--r--spec/frontend/ide/components/ide_review_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_side_bar_spec.js8
-rw-r--r--spec/frontend/ide/components/ide_spec.js6
-rw-r--r--spec/frontend/ide/components/ide_tree_list_spec.js11
-rw-r--r--spec/frontend/ide/components/ide_tree_spec.js9
-rw-r--r--spec/frontend/ide/components/jobs/detail_spec.js2
-rw-r--r--spec/frontend/ide/components/new_dropdown/index_spec.js8
-rw-r--r--spec/frontend/ide/components/new_dropdown/modal_spec.js2
-rw-r--r--spec/frontend/ide/components/panes/collapsible_sidebar_spec.js2
-rw-r--r--spec/frontend/ide/components/pipelines/list_spec.js2
-rw-r--r--spec/frontend/ide/components/preview/navigator_spec.js2
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js4
-rw-r--r--spec/frontend/ide/helpers.js20
-rw-r--r--spec/frontend/ide/ide_router_spec.js2
-rw-r--r--spec/frontend/ide/lib/decorations/controller_spec.js5
-rw-r--r--spec/frontend/ide/lib/diff/controller_spec.js6
-rw-r--r--spec/frontend/ide/lib/editor_spec.js7
-rw-r--r--spec/frontend/ide/lib/languages/vue_spec.js2
-rw-r--r--spec/frontend/ide/stores/actions/merge_request_spec.js9
-rw-r--r--spec/frontend/ide/stores/actions/project_spec.js4
-rw-r--r--spec/frontend/ide/stores/actions/tree_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/file_templates/getters_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/router/actions_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal/messages_spec.js2
-rw-r--r--spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js2
-rw-r--r--spec/frontend/ide/sync_router_and_store_spec.js2
-rw-r--r--spec/frontend/ide/utils_spec.js2
-rw-r--r--spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js2
-rw-r--r--spec/frontend/image_diff/helpers/utils_helper_spec.js2
-rw-r--r--spec/frontend/image_diff/image_diff_spec.js2
-rw-r--r--spec/frontend/image_diff/replaced_image_diff_spec.js2
-rw-r--r--spec/frontend/import_projects/components/import_projects_table_spec.js108
-rw-r--r--spec/frontend/import_projects/components/imported_project_table_row_spec.js65
-rw-r--r--spec/frontend/import_projects/components/page_query_param_sync_spec.js87
-rw-r--r--spec/frontend/import_projects/components/provider_repo_table_row_spec.js120
-rw-r--r--spec/frontend/import_projects/store/actions_spec.js221
-rw-r--r--spec/frontend/import_projects/store/getters_spec.js140
-rw-r--r--spec/frontend/import_projects/store/mutations_spec.js299
-rw-r--r--spec/frontend/import_projects/utils_spec.js32
-rw-r--r--spec/frontend/incidents/components/incidents_list_spec.js362
-rw-r--r--spec/frontend/incidents/mocks/incidents.json39
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap26
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap8
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap54
-rw-r--r--spec/frontend/incidents_settings/components/incidents_settings_service_spec.js4
-rw-r--r--spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js4
-rw-r--r--spec/frontend/incidents_settings/components/pagerduty_form_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/dynamic_field_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js2
-rw-r--r--spec/frontend/integrations/edit/components/trigger_fields_spec.js2
-rw-r--r--spec/frontend/integrations/edit/store/actions_spec.js3
-rw-r--r--spec/frontend/issuable_form_spec.js56
-rw-r--r--spec/frontend/issuable_suggestions/components/item_spec.js2
-rw-r--r--spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap1
-rw-r--r--spec/frontend/issuables_list/components/issuable_spec.js52
-rw-r--r--spec/frontend/issuables_list/components/issuables_list_app_spec.js21
-rw-r--r--spec/frontend/issuables_list/issuable_list_test_data.js2
-rw-r--r--spec/frontend/issue_show/components/app_spec.js9
-rw-r--r--spec/frontend/issue_show/components/issuable_header_warnings_spec.js79
-rw-r--r--spec/frontend/jira_import/components/jira_import_app_spec.js200
-rw-r--r--spec/frontend/jira_import/components/jira_import_form_spec.js201
-rw-r--r--spec/frontend/jira_import/mock_data.js16
-rw-r--r--spec/frontend/jobs/components/empty_state_spec.js128
-rw-r--r--spec/frontend/jobs/components/job_app_spec.js85
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js167
-rw-r--r--spec/frontend/jobs/components/log/mock_data.js2
-rw-r--r--spec/frontend/jobs/components/sidebar_spec.js6
-rw-r--r--spec/frontend/jobs/components/stuck_block_spec.js68
-rw-r--r--spec/frontend/labels_select_spec.js31
-rw-r--r--spec/frontend/lazy_loader_spec.js18
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js2
-rw-r--r--spec/frontend/lib/utils/csrf_token_spec.js2
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js27
-rw-r--r--spec/frontend/lib/utils/poll_spec.js31
-rw-r--r--spec/frontend/lib/utils/poll_until_complete_spec.js2
-rw-r--r--spec/frontend/lib/utils/sticky_spec.js2
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js112
-rw-r--r--spec/frontend/locale/index_spec.js3
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js12
-rw-r--r--spec/frontend/logs/components/log_advanced_filters_spec.js2
-rw-r--r--spec/frontend/logs/components/log_simple_filters_spec.js4
-rw-r--r--spec/frontend/logs/mock_data.js10
-rw-r--r--spec/frontend/logs/stores/actions_spec.js2
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js3
-rw-r--r--spec/frontend/maintenance_mode_settings/components/app_spec.js6
-rw-r--r--spec/frontend/merge_request_spec.js2
-rw-r--r--spec/frontend/merge_request_tabs_spec.js2
-rw-r--r--spec/frontend/milestones/project_milestone_combobox_spec.js4
-rw-r--r--spec/frontend/monitoring/alert_widget_spec.js4
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap65
-rw-r--r--spec/frontend/monitoring/components/alert_widget_form_spec.js112
-rw-r--r--spec/frontend/monitoring/components/charts/gauge_spec.js215
-rw-r--r--spec/frontend/monitoring/components/charts/heatmap_spec.js18
-rw-r--r--spec/frontend/monitoring/components/charts/options_spec.js244
-rw-r--r--spec/frontend/monitoring/components/charts/single_stat_spec.js60
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js27
-rw-r--r--spec/frontend/monitoring/components/dashboard_actions_menu_spec.js440
-rw-r--r--spec/frontend/monitoring/components/dashboard_header_spec.js372
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_builder_spec.js234
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_spec.js111
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js522
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js2
-rw-r--r--spec/frontend/monitoring/components/dashboards_dropdown_spec.js120
-rw-r--r--spec/frontend/monitoring/components/embeds/metric_embed_spec.js4
-rw-r--r--spec/frontend/monitoring/components/graph_group_spec.js2
-rw-r--r--spec/frontend/monitoring/components/group_empty_state_spec.js2
-rw-r--r--spec/frontend/monitoring/components/refresh_button_spec.js30
-rw-r--r--spec/frontend/monitoring/components/variables/dropdown_field_spec.js6
-rw-r--r--spec/frontend/monitoring/csv_export_spec.js126
-rw-r--r--spec/frontend/monitoring/fixture_data.js24
-rw-r--r--spec/frontend/monitoring/graph_data.js92
-rw-r--r--spec/frontend/monitoring/mock_data.js107
-rw-r--r--spec/frontend/monitoring/pages/panel_new_page_spec.js98
-rw-r--r--spec/frontend/monitoring/requests/index_spec.js149
-rw-r--r--spec/frontend/monitoring/router_spec.js66
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js125
-rw-r--r--spec/frontend/monitoring/store/getters_spec.js119
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js149
-rw-r--r--spec/frontend/monitoring/utils_spec.js2
-rw-r--r--spec/frontend/notebook/cells/output/html_sanitize_fixtures.js114
-rw-r--r--spec/frontend/notebook/cells/output/html_sanitize_tests.js68
-rw-r--r--spec/frontend/notebook/cells/output/html_spec.js17
-rw-r--r--spec/frontend/notebook/cells/output/index_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js14
-rw-r--r--spec/frontend/notes/components/discussion_filter_spec.js5
-rw-r--r--spec/frontend/notes/components/discussion_navigator_spec.js (renamed from spec/frontend/notes/components/discussion_keyboard_navigator_spec.js)38
-rw-r--r--spec/frontend/notes/components/discussion_notes_spec.js47
-rw-r--r--spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js4
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js2
-rw-r--r--spec/frontend/notes/components/note_awards_list_spec.js2
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js21
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js38
-rw-r--r--spec/frontend/notes/mixins/discussion_navigation_spec.js31
-rw-r--r--spec/frontend/notes/stores/actions_spec.js85
-rw-r--r--spec/frontend/notes/stores/mutation_spec.js32
-rw-r--r--spec/frontend/onboarding_issues/index_spec.js2
-rw-r--r--spec/frontend/operation_settings/components/metrics_settings_spec.js11
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/code_instruction_spec.js.snap46
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap49
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap34
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/history_element_spec.js.snap38
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap69
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap69
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap49
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap172
-rw-r--r--spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap50
-rw-r--r--spec/frontend/packages/details/components/additional_metadata_spec.js119
-rw-r--r--spec/frontend/packages/details/components/app_spec.js281
-rw-r--r--spec/frontend/packages/details/components/code_instruction_spec.js110
-rw-r--r--spec/frontend/packages/details/components/composer_installation_spec.js95
-rw-r--r--spec/frontend/packages/details/components/conan_installation_spec.js68
-rw-r--r--spec/frontend/packages/details/components/dependency_row_spec.js62
-rw-r--r--spec/frontend/packages/details/components/history_element_spec.js57
-rw-r--r--spec/frontend/packages/details/components/installations_commands_spec.js57
-rw-r--r--spec/frontend/packages/details/components/maven_installation_spec.js91
-rw-r--r--spec/frontend/packages/details/components/npm_installation_spec.js99
-rw-r--r--spec/frontend/packages/details/components/nuget_installation_spec.js75
-rw-r--r--spec/frontend/packages/details/components/package_history_spec.js106
-rw-r--r--spec/frontend/packages/details/components/package_title_spec.js168
-rw-r--r--spec/frontend/packages/details/components/pypi_installation_spec.js60
-rw-r--r--spec/frontend/packages/details/mock_data.js47
-rw-r--r--spec/frontend/packages/details/store/actions_spec.js76
-rw-r--r--spec/frontend/packages/details/store/getters_spec.js237
-rw-r--r--spec/frontend/packages/details/store/mutations_spec.js31
-rw-r--r--spec/frontend/packages/details/utils_spec.js24
-rw-r--r--spec/frontend/packages/list/coming_soon/helpers_spec.js36
-rw-r--r--spec/frontend/packages/list/coming_soon/mock_data.js90
-rw-r--r--spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js138
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap14
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap457
-rw-r--r--spec/frontend/packages/list/components/packages_filter_spec.js50
-rw-r--r--spec/frontend/packages/list/components/packages_list_app_spec.js148
-rw-r--r--spec/frontend/packages/list/components/packages_list_spec.js219
-rw-r--r--spec/frontend/packages/list/components/packages_sort_spec.js92
-rw-r--r--spec/frontend/packages/list/stores/actions_spec.js240
-rw-r--r--spec/frontend/packages/list/stores/getters_spec.js36
-rw-r--r--spec/frontend/packages/list/stores/mutations_spec.js95
-rw-r--r--spec/frontend/packages/list/utils_spec.js39
-rw-r--r--spec/frontend/packages/mock_data.js170
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap101
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap39
-rw-r--r--spec/frontend/packages/shared/components/package_list_row_spec.js106
-rw-r--r--spec/frontend/packages/shared/components/package_tags_spec.js115
-rw-r--r--spec/frontend/packages/shared/components/packages_list_loader_spec.js42
-rw-r--r--spec/frontend/packages/shared/components/publish_method_spec.js50
-rw-r--r--spec/frontend/packages/shared/utils_spec.js66
-rw-r--r--spec/frontend/pager_spec.js2
-rw-r--r--spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js4
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap26
-rw-r--r--spec/frontend/pages/admin/users/components/delete_user_modal_spec.js4
-rw-r--r--spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js50
-rw-r--r--spec/frontend/pages/labels/components/promote_label_modal_spec.js2
-rw-r--r--spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js4
-rw-r--r--spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js2
-rw-r--r--spec/frontend/pages/profiles/show/emoji_menu_spec.js2
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js8
-rw-r--r--spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap16
-rw-r--r--spec/frontend/pages/projects/graphs/code_coverage_spec.js8
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js2
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js84
-rw-r--r--spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js48
-rw-r--r--spec/frontend/pdf/page_spec.js2
-rw-r--r--spec/frontend/performance_bar/components/detailed_metric_spec.js2
-rw-r--r--spec/frontend/persistent_user_callout_spec.js2
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js108
-rw-r--r--spec/frontend/pipeline_new/mock_data.js21
-rw-r--r--spec/frontend/pipelines/components/dag/dag_spec.js119
-rw-r--r--spec/frontend/pipelines/components/dag/drawing_utils_spec.js4
-rw-r--r--spec/frontend/pipelines/components/dag/mock_data.js436
-rw-r--r--spec/frontend/pipelines/components/dag/parsing_utils_spec.js69
-rw-r--r--spec/frontend/pipelines/components/pipelines_filtered_search_spec.js4
-rw-r--r--spec/frontend/pipelines/graph/action_component_spec.js3
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js20
-rw-r--r--spec/frontend/pipelines/header_component_spec.js6
-rw-r--r--spec/frontend/pipelines/pipeline_details_mediator_spec.js2
-rw-r--r--spec/frontend/pipelines/pipelines_actions_spec.js4
-rw-r--r--spec/frontend/pipelines/pipelines_artifacts_spec.js2
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js10
-rw-r--r--spec/frontend/pipelines/stage_spec.js2
-rw-r--r--spec/frontend/pipelines/test_reports/stores/actions_spec.js124
-rw-r--r--spec/frontend/pipelines/test_reports/stores/mutations_spec.js33
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js28
-rw-r--r--spec/frontend/pipelines/test_reports/test_suite_table_spec.js20
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js2
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js2
-rw-r--r--spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js2
-rw-r--r--spec/frontend/project_find_file_spec.js6
-rw-r--r--spec/frontend/projects/commits/components/author_select_spec.js6
-rw-r--r--spec/frontend/projects/commits/store/actions_spec.js4
-rw-r--r--spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap83
-rw-r--r--spec/frontend/projects/components/__snapshots__/remove_modal_spec.js.snap126
-rw-r--r--spec/frontend/projects/components/project_delete_button_spec.js47
-rw-r--r--spec/frontend/projects/components/remove_modal_spec.js62
-rw-r--r--spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap113
-rw-r--r--spec/frontend/projects/components/shared/delete_button_spec.js83
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js2
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js2
-rw-r--r--spec/frontend/projects/project_new_spec.js2
-rw-r--r--spec/frontend/projects/settings/access_dropdown_spec.js140
-rw-r--r--spec/frontend/prometheus_alerts/components/reset_key_spec.js2
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js50
-rw-r--r--spec/frontend/registry/explorer/components/details_page/details_row_spec.js43
-rw-r--r--spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js4
-rw-r--r--spec/frontend/registry/explorer/components/list_page/cli_commands_spec.js4
-rw-r--r--spec/frontend/registry/explorer/pages/details_spec.js2
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js4
-rw-r--r--spec/frontend/registry/explorer/stores/actions_spec.js8
-rw-r--r--spec/frontend/registry/settings/components/settings_form_spec.js2
-rw-r--r--spec/frontend/registry/settings/store/actions_spec.js2
-rw-r--r--spec/frontend/registry/shared/components/details_row_spec.js71
-rw-r--r--spec/frontend/related_merge_requests/store/actions_spec.js2
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js (renamed from spec/frontend/releases/components/app_edit_spec.js)103
-rw-r--r--spec/frontend/releases/components/app_index_spec.js2
-rw-r--r--spec/frontend/releases/components/app_new_spec.js26
-rw-r--r--spec/frontend/releases/components/app_show_spec.js2
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js143
-rw-r--r--spec/frontend/releases/components/release_block_assets_spec.js4
-rw-r--r--spec/frontend/releases/components/release_block_footer_spec.js2
-rw-r--r--spec/frontend/releases/components/release_block_metadata_spec.js2
-rw-r--r--spec/frontend/releases/components/tag_field_exsting_spec.js78
-rw-r--r--spec/frontend/releases/components/tag_field_new_spec.js144
-rw-r--r--spec/frontend/releases/components/tag_field_spec.js59
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js615
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js85
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js48
-rw-r--r--spec/frontend/releases/util_spec.js103
-rw-r--r--spec/frontend/reports/accessibility_report/mock_data.js3
-rw-r--r--spec/frontend/reports/accessibility_report/store/actions_spec.js6
-rw-r--r--spec/frontend/reports/codequality_report/store/actions_spec.js6
-rw-r--r--spec/frontend/reports/components/grouped_test_reports_app_spec.js29
-rw-r--r--spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap12
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js6
-rw-r--r--spec/frontend/repository/components/last_commit_spec.js8
-rw-r--r--spec/frontend/repository/components/preview/index_spec.js6
-rw-r--r--spec/frontend/repository/components/table/index_spec.js2
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js76
-rw-r--r--spec/frontend/repository/components/web_ide_link_spec.js2
-rw-r--r--spec/frontend/repository/utils/dom_spec.js2
-rw-r--r--spec/frontend/search_autocomplete_spec.js26
-rw-r--r--spec/frontend/self_monitor/components/self_monitor_form_spec.js2
-rw-r--r--spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap20
-rw-r--r--spec/frontend/serverless/components/empty_state_spec.js25
-rw-r--r--spec/frontend/serverless/components/function_details_spec.js10
-rw-r--r--spec/frontend/serverless/components/functions_spec.js57
-rw-r--r--spec/frontend/serverless/components/missing_prometheus_spec.js20
-rw-r--r--spec/frontend/serverless/survey_banner_spec.js2
-rw-r--r--spec/frontend/serverless/utils.js4
-rw-r--r--spec/frontend/sidebar/__snapshots__/confidential_issue_sidebar_spec.js.snap8
-rw-r--r--spec/frontend/sidebar/confidential/__snapshots__/edit_form_spec.js.snap50
-rw-r--r--spec/frontend/sidebar/confidential/edit_form_buttons_spec.js108
-rw-r--r--spec/frontend/sidebar/confidential/edit_form_spec.js11
-rw-r--r--spec/frontend/sidebar/confidential_issue_sidebar_spec.js98
-rw-r--r--spec/frontend/sidebar/lock/__snapshots__/edit_form_spec.js.snap79
-rw-r--r--spec/frontend/sidebar/lock/constants.js2
-rw-r--r--spec/frontend/sidebar/lock/edit_form_buttons_spec.js171
-rw-r--r--spec/frontend/sidebar/lock/edit_form_spec.js67
-rw-r--r--spec/frontend/sidebar/lock/issuable_lock_form_spec.js133
-rw-r--r--spec/frontend/sidebar/lock/lock_issue_sidebar_spec.js99
-rw-r--r--spec/frontend/sidebar/todo_spec.js2
-rw-r--r--spec/frontend/snippet/collapsible_input_spec.js2
-rw-r--r--spec/frontend/snippet/snippet_bundle_spec.js2
-rw-r--r--spec/frontend/snippet/snippet_edit_spec.js3
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap32
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/edit_spec.js539
-rw-r--r--spec/frontend/snippets/components/show_spec.js59
-rw-r--r--spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js301
-rw-r--r--spec/frontend/snippets/components/snippet_blob_edit_spec.js227
-rw-r--r--spec/frontend/snippets/components/snippet_blob_view_spec.js82
-rw-r--r--spec/frontend/snippets/components/snippet_description_edit_spec.js2
-rw-r--r--spec/frontend/snippets/components/snippet_description_view_spec.js2
-rw-r--r--spec/frontend/snippets/components/snippet_header_spec.js112
-rw-r--r--spec/frontend/snippets/components/snippet_title_spec.js4
-rw-r--r--spec/frontend/snippets/components/snippet_visibility_edit_spec.js4
-rw-r--r--spec/frontend/snippets/test_utils.js76
-rw-r--r--spec/frontend/snippets/utils/blob_spec.js63
-rw-r--r--spec/frontend/snippets_spec.js2
-rw-r--r--spec/frontend/static_site_editor/components/app_spec.js34
-rw-r--r--spec/frontend/static_site_editor/components/edit_area_spec.js41
-rw-r--r--spec/frontend/static_site_editor/components/saved_changes_message_spec.js56
-rw-r--r--spec/frontend/static_site_editor/pages/home_spec.js2
-rw-r--r--spec/frontend/static_site_editor/pages/success_spec.js63
-rw-r--r--spec/frontend/static_site_editor/services/formatter_spec.js26
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js2
-rw-r--r--spec/frontend/static_site_editor/services/templater_spec.js104
-rw-r--r--spec/frontend/test_setup.js2
-rw-r--r--spec/frontend/vue_alerts_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js12
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js209
-rw-r--r--spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js66
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js6
-rw-r--r--spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js4
-rw-r--r--spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js2
-rw-r--r--spec/frontend/vue_mr_widget/mock_data.js3
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js43
-rw-r--r--spec/frontend/vue_mr_widget/stores/get_state_key_spec.js52
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap5
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap40
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap14
-rw-r--r--spec/frontend/vue_shared/components/clone_dropdown_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/dismissible_container_spec.js58
-rw-r--r--spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js91
-rw-r--r--spec/frontend/vue_shared/components/file_finder/item_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/file_icon_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/file_row_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js129
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js19
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js56
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js170
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js152
-rw-r--r--spec/frontend/vue_shared/components/form/form_footer_actions_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/form/title_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/header_ci_component_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/icon_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/identicon_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js73
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_assignees_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/issue/issue_milestone_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js94
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js24
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_integration_spec.js69
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js51
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js83
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_embedded_ruby_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js27
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_softbreak_spec.js23
-rw-r--r--spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_utils_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js131
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/table_pagination_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/time_ago_tooltip_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/toggle_button_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js6
-rw-r--r--spec/frontend/vue_shared/directives/autofocusonshow_spec.js11
-rw-r--r--spec/frontend/whats_new/components/app_spec.js57
-rw-r--r--spec/frontend/whats_new/components/trigger_spec.js43
-rw-r--r--spec/frontend/whats_new/store/actions_spec.js17
-rw-r--r--spec/frontend/whats_new/store/mutations_spec.js25
-rw-r--r--spec/frontend_integration/ide/ide_integration_spec.js88
-rw-r--r--spec/frontend_integration/test_helpers/factories/commit.js15
-rw-r--r--spec/frontend_integration/test_helpers/factories/commit_id.js21
-rw-r--r--spec/frontend_integration/test_helpers/factories/index.js2
-rw-r--r--spec/frontend_integration/test_helpers/fixtures.js10
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/graphql.js21
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/index.js45
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/404.js7
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/ci.js11
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/graphql.js11
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/index.js12
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/projects.js23
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/repository.js38
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/use.js5
-rw-r--r--spec/frontend_integration/test_helpers/setup/index.js5
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_axios.js5
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_globals.js15
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_mock_server.js13
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_serializers.js3
-rw-r--r--spec/frontend_integration/test_helpers/snapshot_serializer.js18
-rw-r--r--spec/frontend_integration/test_helpers/utils/obj.js36
-rw-r--r--spec/frontend_integration/test_helpers/utils/obj_spec.js23
-rw-r--r--spec/frontend_integration/test_helpers/utils/overclock_timers.js65
-rw-r--r--spec/frontend_integration/test_setup.js1
-rw-r--r--spec/graphql/features/authorization_spec.rb3
-rw-r--r--spec/graphql/mutations/boards/issues/issue_move_list_spec.rb90
-rw-r--r--spec/graphql/mutations/boards/lists/create_spec.rb80
-rw-r--r--spec/graphql/mutations/boards/lists/update_spec.rb56
-rw-r--r--spec/graphql/mutations/commits/create_spec.rb2
-rw-r--r--spec/graphql/mutations/design_management/move_spec.rb81
-rw-r--r--spec/graphql/mutations/issues/set_assignees_spec.rb9
-rw-r--r--spec/graphql/mutations/issues/set_subscription_spec.rb10
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb79
-rw-r--r--spec/graphql/mutations/merge_requests/create_spec.rb13
-rw-r--r--spec/graphql/mutations/merge_requests/set_assignees_spec.rb103
-rw-r--r--spec/graphql/mutations/merge_requests/set_subscription_spec.rb42
-rw-r--r--spec/graphql/resolvers/alert_management/alert_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/board_list_issues_resolver_spec.rb58
-rw-r--r--spec/graphql/resolvers/board_lists_resolver_spec.rb24
-rw-r--r--spec/graphql/resolvers/ci_configuration/sast_resolver_spec.rb28
-rw-r--r--spec/graphql/resolvers/commit_pipelines_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/group_issues_resolver_spec.rb43
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb123
-rw-r--r--spec/graphql/resolvers/issue_status_counts_resolver_spec.rb64
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb16
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb18
-rw-r--r--spec/graphql/resolvers/milestone_resolver_spec.rb113
-rw-r--r--spec/graphql/resolvers/project_milestones_resolver_spec.rb117
-rw-r--r--spec/graphql/resolvers/project_pipeline_resolver_spec.rb17
-rw-r--r--spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb28
-rw-r--r--spec/graphql/resolvers/todo_resolver_spec.rb14
-rw-r--r--spec/graphql/types/alert_management/alert_type_spec.rb4
-rw-r--r--spec/graphql/types/board_list_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/group_type_spec.rb17
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb16
-rw-r--r--spec/graphql/types/ci/stage_type_spec.rb16
-rw-r--r--spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/entity_type_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/options_entity_spec.rb11
-rw-r--r--spec/graphql/types/ci_configuration/sast/type_spec.rb11
-rw-r--r--spec/graphql/types/commit_type_spec.rb4
-rw-r--r--spec/graphql/types/countable_connection_type_spec.rb (renamed from spec/graphql/types/issue_connection_type_spec.rb)0
-rw-r--r--spec/graphql/types/design_management/design_at_version_type_spec.rb1
-rw-r--r--spec/graphql/types/environment_type_spec.rb67
-rw-r--r--spec/graphql/types/group_type_spec.rb2
-rw-r--r--spec/graphql/types/issue_status_count_type_spec.rb17
-rw-r--r--spec/graphql/types/issue_type_enum_spec.rb13
-rw-r--r--spec/graphql/types/issue_type_spec.rb12
-rw-r--r--spec/graphql/types/merge_request_type_spec.rb4
-rw-r--r--spec/graphql/types/project_type_spec.rb106
-rw-r--r--spec/graphql/types/prometheus_alert_type_spec.rb17
-rw-r--r--spec/graphql/types/query_type_spec.rb1
-rw-r--r--spec/graphql/types/snippet_type_spec.rb2
-rw-r--r--spec/graphql/types/snippets/blob_action_enum_spec.rb (renamed from spec/graphql/types/snippets/file_input_action_enum_spec.rb)4
-rw-r--r--spec/graphql/types/snippets/blob_action_input_type_spec.rb (renamed from spec/graphql/types/snippets/file_input_type_spec.rb)8
-rw-r--r--spec/graphql/types/snippets/blob_viewer_type_spec.rb4
-rw-r--r--spec/graphql/types/time_type_spec.rb10
-rw-r--r--spec/graphql/types/tree/blob_type_spec.rb2
-rw-r--r--spec/graphql/types/tree/tree_entry_type_spec.rb2
-rw-r--r--spec/graphql/types/user_status_type_spec.rb17
-rw-r--r--spec/graphql/types/user_type_spec.rb3
-rw-r--r--spec/helpers/appearances_helper_spec.rb2
-rw-r--r--spec/helpers/application_helper_spec.rb36
-rw-r--r--spec/helpers/auto_devops_helper_spec.rb2
-rw-r--r--spec/helpers/award_emoji_helper_spec.rb4
-rw-r--r--spec/helpers/blame_helper_spec.rb2
-rw-r--r--spec/helpers/blob_helper_spec.rb110
-rw-r--r--spec/helpers/branches_helper_spec.rb32
-rw-r--r--spec/helpers/ci/pipelines_helper_spec.rb35
-rw-r--r--spec/helpers/ci/status_helper_spec.rb2
-rw-r--r--spec/helpers/environments_helper_spec.rb7
-rw-r--r--spec/helpers/events_helper_spec.rb2
-rw-r--r--spec/helpers/gitlab_routing_helper_spec.rb84
-rw-r--r--spec/helpers/groups_helper_spec.rb2
-rw-r--r--spec/helpers/icons_helper_spec.rb13
-rw-r--r--spec/helpers/issuables_helper_spec.rb2
-rw-r--r--spec/helpers/issues_helper_spec.rb8
-rw-r--r--spec/helpers/members_helper_spec.rb2
-rw-r--r--spec/helpers/namespace_storage_limit_alert_helper_spec.rb11
-rw-r--r--spec/helpers/namespaces_helper_spec.rb4
-rw-r--r--spec/helpers/notes_helper_spec.rb14
-rw-r--r--spec/helpers/notifications_helper_spec.rb1
-rw-r--r--spec/helpers/operations_helper_spec.rb2
-rw-r--r--spec/helpers/packages_helper_spec.rb88
-rw-r--r--spec/helpers/profiles_helper_spec.rb2
-rw-r--r--spec/helpers/projects/alert_management_helper_spec.rb5
-rw-r--r--spec/helpers/projects/incidents_helper_spec.rb27
-rw-r--r--spec/helpers/projects/issues/service_desk_helper_spec.rb53
-rw-r--r--spec/helpers/projects_helper_spec.rb8
-rw-r--r--spec/helpers/releases_helper_spec.rb1
-rw-r--r--spec/helpers/services_helper_spec.rb5
-rw-r--r--spec/helpers/snippets_helper_spec.rb57
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb46
-rw-r--r--spec/helpers/visibility_level_helper_spec.rb7
-rw-r--r--spec/helpers/wiki_helper_spec.rb2
-rw-r--r--spec/initializers/carrierwave_patch_spec.rb32
-rw-r--r--spec/initializers/database_config_spec.rb72
-rw-r--r--spec/initializers/direct_upload_support_spec.rb12
-rw-r--r--spec/javascripts/test_bundle.js13
-rw-r--r--spec/lib/api/entities/nuget/dependency_group_spec.rb1
-rw-r--r--spec/lib/api/entities/nuget/dependency_spec.rb1
-rw-r--r--spec/lib/api/entities/nuget/metadatum_spec.rb2
-rw-r--r--spec/lib/api/entities/nuget/search_result_spec.rb1
-rw-r--r--spec/lib/api/entities/snippet_spec.rb8
-rw-r--r--spec/lib/api/helpers/merge_requests_helpers_spec.rb63
-rw-r--r--spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb34
-rw-r--r--spec/lib/api/helpers_spec.rb86
-rw-r--r--spec/lib/api/support/git_access_actor_spec.rb48
-rw-r--r--spec/lib/api/validations/validators/file_path_spec.rb73
-rw-r--r--spec/lib/backup/repository_spec.rb88
-rw-r--r--spec/lib/banzai/filter/absolute_link_filter_spec.rb1
-rw-r--r--spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/gollum_tags_filter_spec.rb3
-rw-r--r--spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb21
-rw-r--r--spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb15
-rw-r--r--spec/lib/banzai/filter/label_reference_filter_spec.rb13
-rw-r--r--spec/lib/banzai/filter/merge_request_reference_filter_spec.rb1
-rw-r--r--spec/lib/banzai/filter/reference_filter_spec.rb88
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/wiki_link_filter_spec.rb3
-rw-r--r--spec/lib/banzai/issuable_extractor_spec.rb1
-rw-r--r--spec/lib/banzai/object_renderer_spec.rb1
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb1
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb28
-rw-r--r--spec/lib/banzai/pipeline/wiki_pipeline_spec.rb2
-rw-r--r--spec/lib/banzai/reference_parser/base_parser_spec.rb99
-rw-r--r--spec/lib/banzai/reference_parser/snippet_parser_spec.rb11
-rw-r--r--spec/lib/container_registry/client_spec.rb1
-rw-r--r--spec/lib/gitlab/alert_management/alert_params_spec.rb2
-rw-r--r--spec/lib/gitlab/alert_management/alert_status_counts_spec.rb8
-rw-r--r--spec/lib/gitlab/alerting/alert_spec.rb4
-rw-r--r--spec/lib/gitlab/analytics/unique_visits_spec.rb41
-rw-r--r--spec/lib/gitlab/app_logger_spec.rb8
-rw-r--r--spec/lib/gitlab/application_rate_limiter_spec.rb1
-rw-r--r--spec/lib/gitlab/asciidoc/include_processor_spec.rb1
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb45
-rw-r--r--spec/lib/gitlab/audit/null_author_spec.rb22
-rw-r--r--spec/lib/gitlab/audit/unauthenticated_author_spec.rb17
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb55
-rw-r--r--spec/lib/gitlab/auth/ldap/user_spec.rb3
-rw-r--r--spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb1
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb195
-rw-r--r--spec/lib/gitlab/auth/request_authenticator_spec.rb1
-rw-r--r--spec/lib/gitlab/auth/saml/user_spec.rb2
-rw-r--r--spec/lib/gitlab/auth_spec.rb13
-rw-r--r--spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb61
-rw-r--r--spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb7
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb88
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb1
-rw-r--r--spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb39
-rw-r--r--spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb111
-rw-r--r--spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb1
-rw-r--r--spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb84
-rw-r--r--spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb35
-rw-r--r--spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb94
-rw-r--r--spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb141
-rw-r--r--spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb263
-rw-r--r--spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb254
-rw-r--r--spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb159
-rw-r--r--spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb126
-rw-r--r--spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb33
-rw-r--r--spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb33
-rw-r--r--spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb40
-rw-r--r--spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb33
-rw-r--r--spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb33
-rw-r--r--spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb16
-rw-r--r--spec/lib/gitlab/background_migration_spec.rb1
-rw-r--r--spec/lib/gitlab/badge/coverage/report_spec.rb2
-rw-r--r--spec/lib/gitlab/bitbucket_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/build_access_spec.rb2
-rw-r--r--spec/lib/gitlab/checks/change_access_spec.rb16
-rw-r--r--spec/lib/gitlab/ci/ansi2html_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb55
-rw-r--r--spec/lib/gitlab/ci/build/auto_retry_spec.rb127
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb88
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb188
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb94
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/variables_spec.rb88
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/normalizer/factory_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb102
-rw-r--r--spec/lib/gitlab/ci/config/normalizer/number_strategy_spec.rb68
-rw-r--r--spec/lib/gitlab/ci/config/normalizer_spec.rb211
-rw-r--r--spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb28
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/and_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/not_equals_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/or_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb117
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb56
-rw-r--r--spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/reports/test_report_summary_spec.rb64
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/runner_instructions_spec.rb217
-rw-r--r--spec/lib/gitlab/ci/status/composite_spec.rb83
-rw-r--r--spec/lib/gitlab/ci/trace/stream_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb179
-rw-r--r--spec/lib/gitlab/cleanup/remote_uploads_spec.rb1
-rw-r--r--spec/lib/gitlab/cross_project_access/class_methods_spec.rb1
-rw-r--r--spec/lib/gitlab/danger/changelog_spec.rb18
-rw-r--r--spec/lib/gitlab/danger/commit_linter_spec.rb1
-rw-r--r--spec/lib/gitlab/danger/helper_spec.rb40
-rw-r--r--spec/lib/gitlab/danger/roulette_spec.rb233
-rw-r--r--spec/lib/gitlab/danger/teammate_spec.rb53
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb140
-rw-r--r--spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb1
-rw-r--r--spec/lib/gitlab/database/custom_structure_spec.rb1
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb2
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb1
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb110
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb35
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb1
-rw-r--r--spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb42
-rw-r--r--spec/lib/gitlab/database/schema_version_files_spec.rb95
-rw-r--r--spec/lib/gitlab/database/similarity_score_spec.rb93
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb38
-rw-r--r--spec/lib/gitlab/diff/file_collection/commit_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/file_collection/compare_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb1
-rw-r--r--spec/lib/gitlab/diff/file_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb55
-rw-r--r--spec/lib/gitlab/diff/position_collection_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/stats_cache_spec.rb29
-rw-r--r--spec/lib/gitlab/diff/suggestion_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/repository_push_spec.rb2
-rw-r--r--spec/lib/gitlab/encoding_helper_spec.rb1
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb62
-rw-r--r--spec/lib/gitlab/external_authorization/client_spec.rb24
-rw-r--r--spec/lib/gitlab/external_authorization/response_spec.rb20
-rw-r--r--spec/lib/gitlab/file_finder_spec.rb42
-rw-r--r--spec/lib/gitlab/fogbugz_import/importer_spec.rb1
-rw-r--r--spec/lib/gitlab/gfm/reference_rewriter_spec.rb14
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb1
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb1
-rw-r--r--spec/lib/gitlab/git/conflict/parser_spec.rb3
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb2
-rw-r--r--spec/lib/gitlab/git/diff_spec.rb2
-rw-r--r--spec/lib/gitlab/git/patches/collection_spec.rb1
-rw-r--r--spec/lib/gitlab/git/patches/commit_patches_spec.rb1
-rw-r--r--spec/lib/gitlab/git/patches/patch_spec.rb1
-rw-r--r--spec/lib/gitlab/git/pre_receive_error_spec.rb16
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb3
-rw-r--r--spec/lib/gitlab/git_access_project_spec.rb92
-rw-r--r--spec/lib/gitlab/git_access_spec.rb57
-rw-r--r--spec/lib/gitlab/git_access_wiki_spec.rb61
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb1
-rw-r--r--spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb1
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb8
-rw-r--r--spec/lib/gitlab/gitaly_client/wiki_service_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb2
-rw-r--r--spec/lib/gitlab/gitlab_import/project_creator_spec.rb1
-rw-r--r--spec/lib/gitlab/google_code_import/importer_spec.rb1
-rw-r--r--spec/lib/gitlab/google_code_import/project_creator_spec.rb1
-rw-r--r--spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb1
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb4
-rw-r--r--spec/lib/gitlab/hashed_path_spec.rb28
-rw-r--r--spec/lib/gitlab/hook_data/issuable_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/http_spec.rb85
-rw-r--r--spec/lib/gitlab/i18n/po_linter_spec.rb74
-rw-r--r--spec/lib/gitlab/i18n/translation_entry_spec.rb166
-rw-r--r--spec/lib/gitlab/i18n_spec.rb8
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml12
-rw-r--r--spec/lib/gitlab/import_export/base/object_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml8
-rw-r--r--spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb2
-rw-r--r--spec/lib/gitlab/incoming_email_spec.rb10
-rw-r--r--spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb4
-rw-r--r--spec/lib/gitlab/issuable_sorter_spec.rb1
-rw-r--r--spec/lib/gitlab/issuables_count_for_state_spec.rb15
-rw-r--r--spec/lib/gitlab/jira_import/issue_serializer_spec.rb1
-rw-r--r--spec/lib/gitlab/jira_import/metadata_collector_spec.rb2
-rw-r--r--spec/lib/gitlab/job_waiter_spec.rb6
-rw-r--r--spec/lib/gitlab/json_spec.rb32
-rw-r--r--spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb217
-rw-r--r--spec/lib/gitlab/kubernetes/helm/api_spec.rb5
-rw-r--r--spec/lib/gitlab/kubernetes/helm/base_command_spec.rb3
-rw-r--r--spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb47
-rw-r--r--spec/lib/gitlab/kubernetes/helm/init_command_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/helm/install_command_spec.rb44
-rw-r--r--spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb40
-rw-r--r--spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb2
-rw-r--r--spec/lib/gitlab/kubernetes/kube_client_spec.rb54
-rw-r--r--spec/lib/gitlab/kubernetes/network_policy_spec.rb238
-rw-r--r--spec/lib/gitlab/kubernetes/node_spec.rb52
-rw-r--r--spec/lib/gitlab/language_detection_spec.rb1
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb1
-rw-r--r--spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb1
-rw-r--r--spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb1
-rw-r--r--spec/lib/gitlab/lograge/custom_options_spec.rb1
-rw-r--r--spec/lib/gitlab/manifest_import/project_creator_spec.rb3
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/store_spec.rb1
-rw-r--r--spec/lib/gitlab/metrics/background_transaction_spec.rb34
-rw-r--r--spec/lib/gitlab/metrics/dashboard/cache_spec.rb86
-rw-r--r--spec/lib/gitlab/metrics/dashboard/defaults_spec.rb1
-rw-r--r--spec/lib/gitlab/metrics/dashboard/finder_spec.rb45
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb25
-rw-r--r--spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb54
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb59
-rw-r--r--spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb27
-rw-r--r--spec/lib/gitlab/metrics/dashboard/url_spec.rb28
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb29
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb15
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb140
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb78
-rw-r--r--spec/lib/gitlab/metrics/dashboard/validator_spec.rb146
-rw-r--r--spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb34
-rw-r--r--spec/lib/gitlab/metrics/method_call_spec.rb27
-rw-r--r--spec/lib/gitlab/metrics/methods_spec.rb10
-rw-r--r--spec/lib/gitlab/metrics/rack_middleware_spec.rb8
-rw-r--r--spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb61
-rw-r--r--spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb83
-rw-r--r--spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb6
-rw-r--r--spec/lib/gitlab/metrics/subscribers/action_view_spec.rb6
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb17
-rw-r--r--spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb69
-rw-r--r--spec/lib/gitlab/metrics/transaction_spec.rb121
-rw-r--r--spec/lib/gitlab/metrics/web_transaction_spec.rb92
-rw-r--r--spec/lib/gitlab/metrics_spec.rb9
-rw-r--r--spec/lib/gitlab/middleware/rails_queue_duration_spec.rb11
-rw-r--r--spec/lib/gitlab/middleware/read_only_spec.rb13
-rw-r--r--spec/lib/gitlab/pages/settings_spec.rb48
-rw-r--r--spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb106
-rw-r--r--spec/lib/gitlab/phabricator_import/user_finder_spec.rb1
-rw-r--r--spec/lib/gitlab/popen/runner_spec.rb2
-rw-r--r--spec/lib/gitlab/project_search_results_spec.rb1
-rw-r--r--spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb1
-rw-r--r--spec/lib/gitlab/prometheus_client_spec.rb1
-rw-r--r--spec/lib/gitlab/redis/hll_spec.rb106
-rw-r--r--spec/lib/gitlab/regex_spec.rb16
-rw-r--r--spec/lib/gitlab/repository_cache_adapter_spec.rb86
-rw-r--r--spec/lib/gitlab/repository_hash_cache_spec.rb18
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb17
-rw-r--r--spec/lib/gitlab/search/query_spec.rb8
-rw-r--r--spec/lib/gitlab/service_desk_email_spec.rb22
-rw-r--r--spec/lib/gitlab/sidekiq_cluster_spec.rb1
-rw-r--r--spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb7
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb39
-rw-r--r--spec/lib/gitlab/sidekiq_versioning/middleware_spec.rb48
-rw-r--r--spec/lib/gitlab/sidekiq_versioning/worker_spec.rb54
-rw-r--r--spec/lib/gitlab/static_site_editor/config_spec.rb22
-rw-r--r--spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb42
-rw-r--r--spec/lib/gitlab/template/metrics_dashboard_template_spec.rb26
-rw-r--r--spec/lib/gitlab/tree_summary_spec.rb4
-rw-r--r--spec/lib/gitlab/url_blocker_spec.rb1
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb16
-rw-r--r--spec/lib/gitlab/usage_data/topology_spec.rb174
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb113
-rw-r--r--spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb63
-rw-r--r--spec/lib/gitlab/usage_data_counters/wiki_page_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb651
-rw-r--r--spec/lib/gitlab/user_access_spec.rb4
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb2
-rw-r--r--spec/lib/gitlab/utils_spec.rb64
-rw-r--r--spec/lib/gitlab/view/presenter/base_spec.rb28
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb18
-rw-r--r--spec/lib/json_web_token/rsa_token_spec.rb1
-rw-r--r--spec/lib/mattermost/session_spec.rb1
-rw-r--r--spec/lib/object_storage/config_spec.rb179
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb87
-rw-r--r--spec/lib/omni_auth/strategies/jwt_spec.rb2
-rw-r--r--spec/lib/product_analytics/event_params_spec.rb7
-rw-r--r--spec/lib/product_analytics/tracker_spec.rb8
-rw-r--r--spec/lib/rspec_flaky/example_spec.rb1
-rw-r--r--spec/lib/rspec_flaky/flaky_example_spec.rb2
-rw-r--r--spec/lib/rspec_flaky/flaky_examples_collection_spec.rb1
-rw-r--r--spec/lib/rspec_flaky/listener_spec.rb2
-rw-r--r--spec/lib/rspec_flaky/report_spec.rb2
-rw-r--r--spec/lib/sentry/client/event_spec.rb2
-rw-r--r--spec/lib/sentry/client/issue_spec.rb2
-rw-r--r--spec/mailers/emails/profile_spec.rb50
-rw-r--r--spec/mailers/notify_spec.rb1
-rw-r--r--spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb8
-rw-r--r--spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb7
-rw-r--r--spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb5
-rw-r--r--spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb11
-rw-r--r--spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb47
-rw-r--r--spec/migrations/migrate_all_merge_request_user_mentions_to_db_spec.rb35
-rw-r--r--spec/migrations/migrate_incident_issues_to_incident_type_spec.rb55
-rw-r--r--spec/migrations/migrate_k8s_service_integration_spec.rb1
-rw-r--r--spec/migrations/remove_duplicate_labels_from_project_spec.rb1
-rw-r--r--spec/migrations/remove_orphaned_invited_members_spec.rb2
-rw-r--r--spec/migrations/schedule_populate_personal_snippet_statistics_spec.rb56
-rw-r--r--spec/models/ability_spec.rb1
-rw-r--r--spec/models/alert_management/alert_spec.rb151
-rw-r--r--spec/models/analytics/cycle_analytics/project_stage_spec.rb2
-rw-r--r--spec/models/application_record_spec.rb23
-rw-r--r--spec/models/application_setting_spec.rb6
-rw-r--r--spec/models/audit_event_partitioned_spec.rb41
-rw-r--r--spec/models/audit_event_spec.rb18
-rw-r--r--spec/models/blob_viewer/composer_json_spec.rb1
-rw-r--r--spec/models/blob_viewer/gemspec_spec.rb1
-rw-r--r--spec/models/blob_viewer/go_mod_spec.rb1
-rw-r--r--spec/models/blob_viewer/package_json_spec.rb2
-rw-r--r--spec/models/blob_viewer/podspec_json_spec.rb1
-rw-r--r--spec/models/blob_viewer/podspec_spec.rb1
-rw-r--r--spec/models/blob_viewer/route_map_spec.rb1
-rw-r--r--spec/models/ci/build_spec.rb248
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb14
-rw-r--r--spec/models/ci/build_trace_chunks/database_spec.rb18
-rw-r--r--spec/models/ci/build_trace_chunks/fog_spec.rb28
-rw-r--r--spec/models/ci/build_trace_chunks/redis_spec.rb98
-rw-r--r--spec/models/ci/daily_build_group_report_result_spec.rb1
-rw-r--r--spec/models/ci/group_spec.rb20
-rw-r--r--spec/models/ci/instance_variable_spec.rb29
-rw-r--r--spec/models/ci/job_artifact_spec.rb76
-rw-r--r--spec/models/ci/pipeline_artifact_spec.rb82
-rw-r--r--spec/models/ci/pipeline_spec.rb311
-rw-r--r--spec/models/ci/ref_spec.rb71
-rw-r--r--spec/models/clusters/agent_spec.rb35
-rw-r--r--spec/models/clusters/agent_token_spec.rb18
-rw-r--r--spec/models/clusters/applications/ingress_spec.rb4
-rw-r--r--spec/models/clusters/cluster_spec.rb83
-rw-r--r--spec/models/clusters/platforms/kubernetes_spec.rb1
-rw-r--r--spec/models/commit_collection_spec.rb29
-rw-r--r--spec/models/commit_status_spec.rb56
-rw-r--r--spec/models/commit_with_pipeline_spec.rb1
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb31
-rw-r--r--spec/models/concerns/ci/artifactable_spec.rb21
-rw-r--r--spec/models/concerns/ci/has_status_spec.rb36
-rw-r--r--spec/models/concerns/counter_attribute_spec.rb52
-rw-r--r--spec/models/concerns/featurable_spec.rb1
-rw-r--r--spec/models/concerns/issuable_spec.rb2
-rw-r--r--spec/models/concerns/manual_inverse_association_spec.rb2
-rw-r--r--spec/models/concerns/milestoneable_spec.rb18
-rw-r--r--spec/models/concerns/sha_attribute_spec.rb8
-rw-r--r--spec/models/custom_emoji_spec.rb1
-rw-r--r--spec/models/design_management/design_at_version_spec.rb9
-rw-r--r--spec/models/design_management/design_collection_spec.rb9
-rw-r--r--spec/models/design_management/design_spec.rb134
-rw-r--r--spec/models/environment_spec.rb24
-rw-r--r--spec/models/event_spec.rb39
-rw-r--r--spec/models/experiment_spec.rb112
-rw-r--r--spec/models/experiment_user_spec.rb14
-rw-r--r--spec/models/group_deploy_key_spec.rb74
-rw-r--r--spec/models/group_deploy_keys_group_spec.rb15
-rw-r--r--spec/models/group_spec.rb45
-rw-r--r--spec/models/issue_spec.rb84
-rw-r--r--spec/models/iteration_spec.rb100
-rw-r--r--spec/models/lfs_object_spec.rb16
-rw-r--r--spec/models/member_spec.rb23
-rw-r--r--spec/models/merge_request/metrics_spec.rb40
-rw-r--r--spec/models/merge_request_diff_commit_spec.rb2
-rw-r--r--spec/models/merge_request_diff_spec.rb24
-rw-r--r--spec/models/merge_request_spec.rb32
-rw-r--r--spec/models/namespace_spec.rb1
-rw-r--r--spec/models/note_spec.rb32
-rw-r--r--spec/models/notification_setting_spec.rb43
-rw-r--r--spec/models/packages/package_file_spec.rb27
-rw-r--r--spec/models/pages_domain_spec.rb27
-rw-r--r--spec/models/personal_access_token_spec.rb12
-rw-r--r--spec/models/personal_snippet_spec.rb12
-rw-r--r--spec/models/plan_limits_spec.rb4
-rw-r--r--spec/models/product_analytics_event_spec.rb14
-rw-r--r--spec/models/project_repository_storage_move_spec.rb4
-rw-r--r--spec/models/project_services/buildkite_service_spec.rb20
-rw-r--r--spec/models/project_services/gitlab_issue_tracker_service_spec.rb54
-rw-r--r--spec/models/project_services/jira_service_spec.rb2
-rw-r--r--spec/models/project_services/jira_tracker_data_spec.rb4
-rw-r--r--spec/models/project_services/microsoft_teams_service_spec.rb1
-rw-r--r--spec/models/project_snippet_spec.rb4
-rw-r--r--spec/models/project_spec.rb217
-rw-r--r--spec/models/project_statistics_spec.rb4
-rw-r--r--spec/models/prometheus_alert_spec.rb37
-rw-r--r--spec/models/prometheus_metric_spec.rb4
-rw-r--r--spec/models/raw_usage_data_spec.rb44
-rw-r--r--spec/models/release_spec.rb16
-rw-r--r--spec/models/repository_spec.rb48
-rw-r--r--spec/models/resource_iteration_event_spec.rb17
-rw-r--r--spec/models/resource_milestone_event_spec.rb69
-rw-r--r--spec/models/service_spec.rb48
-rw-r--r--spec/models/snippet_repository_spec.rb1
-rw-r--r--spec/models/suggestion_spec.rb52
-rw-r--r--spec/models/terraform/state_spec.rb8
-rw-r--r--spec/models/user_spec.rb35
-rw-r--r--spec/models/wiki_page_spec.rb118
-rw-r--r--spec/models/wiki_spec.rb14
-rw-r--r--spec/policies/ci/build_policy_spec.rb32
-rw-r--r--spec/policies/ci/pipeline_policy_spec.rb2
-rw-r--r--spec/policies/ci/pipeline_schedule_policy_spec.rb6
-rw-r--r--spec/policies/concerns/crud_policy_helpers_spec.rb50
-rw-r--r--spec/policies/concerns/readonly_abilities_spec.rb27
-rw-r--r--spec/policies/design_management/design_policy_spec.rb53
-rw-r--r--spec/policies/group_deploy_key_policy_spec.rb27
-rw-r--r--spec/policies/group_deploy_keys_group_policy_spec.rb24
-rw-r--r--spec/policies/group_policy_spec.rb20
-rw-r--r--spec/policies/issue_policy_spec.rb6
-rw-r--r--spec/policies/merge_request_policy_spec.rb6
-rw-r--r--spec/policies/personal_access_token_policy_spec.rb63
-rw-r--r--spec/policies/project_policy_spec.rb16
-rw-r--r--spec/policies/user_policy_spec.rb28
-rw-r--r--spec/presenters/alert_management/alert_presenter_spec.rb25
-rw-r--r--spec/presenters/alert_management/prometheus_alert_presenter_spec.rb23
-rw-r--r--spec/presenters/blob_presenter_spec.rb11
-rw-r--r--spec/presenters/clusters/cluster_presenter_spec.rb2
-rw-r--r--spec/presenters/commit_presenter_spec.rb4
-rw-r--r--spec/presenters/event_presenter_spec.rb2
-rw-r--r--spec/presenters/packages/detail/package_presenter_spec.rb45
-rw-r--r--spec/presenters/project_presenter_spec.rb2
-rw-r--r--spec/presenters/projects/prometheus/alert_presenter_spec.rb13
-rw-r--r--spec/presenters/prometheus_alert_presenter_spec.rb32
-rw-r--r--spec/presenters/snippet_blob_presenter_spec.rb77
-rw-r--r--spec/presenters/snippet_presenter_spec.rb8
-rw-r--r--spec/presenters/tree_entry_presenter_spec.rb4
-rw-r--r--spec/presenters/user_presenter_spec.rb16
-rw-r--r--spec/requests/api/admin/ci/variables_spec.rb9
-rw-r--r--spec/requests/api/branches_spec.rb82
-rw-r--r--spec/requests/api/ci/pipelines_spec.rb69
-rw-r--r--spec/requests/api/ci/runner/jobs_artifacts_spec.rb901
-rw-r--r--spec/requests/api/ci/runner/jobs_put_spec.rb196
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb861
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb292
-rw-r--r--spec/requests/api/ci/runner/runners_delete_spec.rb54
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb250
-rw-r--r--spec/requests/api/ci/runner/runners_verify_post_spec.rb48
-rw-r--r--spec/requests/api/ci/runner_spec.rb2474
-rw-r--r--spec/requests/api/commits_spec.rb46
-rw-r--r--spec/requests/api/composer_packages_spec.rb375
-rw-r--r--spec/requests/api/conan_packages_spec.rb83
-rw-r--r--spec/requests/api/deploy_keys_spec.rb1
-rw-r--r--spec/requests/api/files_spec.rb50
-rw-r--r--spec/requests/api/go_proxy_spec.rb4
-rw-r--r--spec/requests/api/graphql/boards/board_list_issues_query_spec.rb101
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb14
-rw-r--r--spec/requests/api/graphql/ci/groups_spec.rb55
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb93
-rw-r--r--spec/requests/api/graphql/ci/stages_spec.rb46
-rw-r--r--spec/requests/api/graphql/issue_status_counts_spec.rb58
-rw-r--r--spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb22
-rw-r--r--spec/requests/api/graphql/milestone_spec.rb47
-rw-r--r--spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb109
-rw-r--r--spec/requests/api/graphql/mutations/boards/lists/update_spec.rb57
-rw-r--r--spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/design_management/move_spec.rb122
-rw-r--r--spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb12
-rw-r--r--spec/requests/api/graphql/mutations/issues/update_spec.rb41
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/create_spec.rb1
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb57
-rw-r--r--spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/notes/update/note_spec.rb39
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb3
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb51
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/alert_management/alerts_spec.rb15
-rw-r--r--spec/requests/api/graphql/project/container_expiration_policy_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/issue/designs/designs_spec.rb3
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/jira_import_spec.rb5
-rw-r--r--spec/requests/api/graphql/project/jira_projects_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb39
-rw-r--r--spec/requests/api/graphql/project/packages_spec.rb50
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/repository_spec.rb1
-rw-r--r--spec/requests/api/graphql/project/tree/tree_spec.rb1
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb14
-rw-r--r--spec/requests/api/graphql/user_query_spec.rb6
-rw-r--r--spec/requests/api/group_import_spec.rb2
-rw-r--r--spec/requests/api/group_milestones_spec.rb68
-rw-r--r--spec/requests/api/group_packages_spec.rb182
-rw-r--r--spec/requests/api/group_variables_spec.rb8
-rw-r--r--spec/requests/api/groups_spec.rb61
-rw-r--r--spec/requests/api/helpers_spec.rb1
-rw-r--r--spec/requests/api/import_bitbucket_server_spec.rb2
-rw-r--r--spec/requests/api/import_github_spec.rb2
-rw-r--r--spec/requests/api/internal/base_spec.rb139
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb154
-rw-r--r--spec/requests/api/internal/pages_spec.rb1
-rw-r--r--spec/requests/api/issues/get_group_issues_spec.rb6
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb9
-rw-r--r--spec/requests/api/issues/issues_spec.rb58
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb3
-rw-r--r--spec/requests/api/jobs_spec.rb12
-rw-r--r--spec/requests/api/merge_requests_spec.rb10
-rw-r--r--spec/requests/api/notes_spec.rb1
-rw-r--r--spec/requests/api/notification_settings_spec.rb3
-rw-r--r--spec/requests/api/npm_packages_spec.rb151
-rw-r--r--spec/requests/api/nuget_packages_spec.rb107
-rw-r--r--spec/requests/api/pages_domains_spec.rb1
-rw-r--r--spec/requests/api/performance_bar_spec.rb41
-rw-r--r--spec/requests/api/project_export_spec.rb16
-rw-r--r--spec/requests/api/project_hooks_spec.rb10
-rw-r--r--spec/requests/api/project_milestones_spec.rb70
-rw-r--r--spec/requests/api/project_snippets_spec.rb80
-rw-r--r--spec/requests/api/project_templates_spec.rb73
-rw-r--r--spec/requests/api/projects_spec.rb33
-rw-r--r--spec/requests/api/pypi_packages_spec.rb313
-rw-r--r--spec/requests/api/releases_spec.rb26
-rw-r--r--spec/requests/api/snippets_spec.rb47
-rw-r--r--spec/requests/api/suggestions_spec.rb4
-rw-r--r--spec/requests/api/users_spec.rb2
-rw-r--r--spec/requests/git_http_spec.rb4
-rw-r--r--spec/requests/groups/milestones_controller_spec.rb1
-rw-r--r--spec/requests/lfs_http_spec.rb19
-rw-r--r--spec/requests/product_analytics/collector_app_spec.rb20
-rw-r--r--spec/requests/projects/incident_management/pagerduty_incidents_spec.rb2
-rw-r--r--spec/requests/projects/metrics/dashboards/builder_spec.rb106
-rw-r--r--spec/requests/projects/metrics_dashboard_spec.rb38
-rw-r--r--spec/requests/rack_attack_global_spec.rb1
-rw-r--r--spec/requests/search_controller_spec.rb66
-rw-r--r--spec/routing/notifications_routing_spec.rb5
-rw-r--r--spec/routing/project_routing_spec.rb82
-rw-r--r--spec/routing/routing_spec.rb24
-rw-r--r--spec/rubocop/cop/avoid_becomes_spec.rb34
-rw-r--r--spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb7
-rw-r--r--spec/rubocop/cop/avoid_return_from_blocks_spec.rb7
-rw-r--r--spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb1
-rw-r--r--spec/rubocop/cop/graphql/json_type_spec.rb79
-rw-r--r--spec/rubocop/cop/migration/drop_table_spec.rb7
-rw-r--r--spec/rubocop/cop/put_group_routes_under_scope_spec.rb17
-rw-r--r--spec/rubocop/cop/put_project_routes_under_scope_spec.rb17
-rw-r--r--spec/rubocop/cop/rspec/any_instance_of_spec.rb2
-rw-r--r--spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb38
-rw-r--r--spec/rubocop/cop/usage_data/large_table_spec.rb90
-rw-r--r--spec/serializers/accessibility_reports_comparer_entity_spec.rb1
-rw-r--r--spec/serializers/accessibility_reports_comparer_serializer_spec.rb1
-rw-r--r--spec/serializers/analytics_issue_entity_spec.rb25
-rw-r--r--spec/serializers/build_details_entity_spec.rb28
-rw-r--r--spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb1
-rw-r--r--spec/serializers/cluster_error_entity_spec.rb35
-rw-r--r--spec/serializers/cluster_serializer_spec.rb1
-rw-r--r--spec/serializers/commit_entity_spec.rb1
-rw-r--r--spec/serializers/diff_file_base_entity_spec.rb1
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb1
-rw-r--r--spec/serializers/environment_entity_spec.rb28
-rw-r--r--spec/serializers/environment_serializer_spec.rb1
-rw-r--r--spec/serializers/group_deploy_key_entity_spec.rb47
-rw-r--r--spec/serializers/import/bitbucket_provider_repo_entity_spec.rb1
-rw-r--r--spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb7
-rw-r--r--spec/serializers/import/fogbugz_provider_repo_entity_spec.rb1
-rw-r--r--spec/serializers/import/manifest_provider_repo_entity_spec.rb28
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb41
-rw-r--r--spec/serializers/merge_request_widget_entity_spec.rb78
-rw-r--r--spec/serializers/merge_requests/pipeline_entity_spec.rb45
-rw-r--r--  spec/serializers/paginated_diff_entity_spec.rb | 1
-rw-r--r--  spec/serializers/pipeline_entity_spec.rb | 21
-rw-r--r--  spec/serializers/pipeline_serializer_spec.rb | 30
-rw-r--r--  spec/serializers/prometheus_alert_entity_spec.rb | 2
-rw-r--r--  spec/serializers/release_serializer_spec.rb | 32
-rw-r--r--  spec/serializers/suggestion_entity_spec.rb | 82
-rw-r--r--  spec/serializers/test_report_summary_entity_spec.rb | 8
-rw-r--r--  spec/serializers/test_suite_summary_entity_spec.rb | 2
-rw-r--r--  spec/services/admin/propagate_integration_service_spec.rb | 2
-rw-r--r--  spec/services/alert_management/alerts/update_service_spec.rb | 3
-rw-r--r--  spec/services/alert_management/create_alert_issue_service_spec.rb | 2
-rw-r--r--  spec/services/award_emojis/copy_service_spec.rb | 34
-rw-r--r--  spec/services/boards/lists/create_service_spec.rb | 15
-rw-r--r--  spec/services/boards/lists/list_service_spec.rb | 6
-rw-r--r--  spec/services/branches/create_service_spec.rb | 20
-rw-r--r--  spec/services/ci/build_report_result_service_spec.rb | 10
-rw-r--r--  spec/services/ci/change_variable_service_spec.rb | 68
-rw-r--r--  spec/services/ci/change_variables_service_spec.rb | 21
-rw-r--r--  spec/services/ci/create_job_artifacts_service_spec.rb | 19
-rw-r--r--  spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb | 8
-rw-r--r--  spec/services/ci/create_pipeline_service/dry_run_spec.rb | 119
-rw-r--r--  spec/services/ci/create_pipeline_service/parameter_content_spec.rb | 9
-rw-r--r--  spec/services/ci/create_pipeline_service_spec.rb | 30
-rw-r--r--  spec/services/ci/daily_build_group_report_result_service_spec.rb | 3
-rw-r--r--  spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb | 10
-rw-r--r--  spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb | 35
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service.rb | 3
-rw-r--r--  spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb | 2
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test_when_always.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_fails_with_allow_failure.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure_deploy_needs_test.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_fails_with_allow_failure_test_on_failure.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml | 41
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml | 41
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure_deploy_needs_test.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml | 82
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test_when_always.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_deploy_needs_empty.yml | 27
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_fails_with_allow_failure.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml | 45
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml | 45
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_other_test_succeeds_deploy_needs_both.yml | 40
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_on_failure.yml (renamed from spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_fails_with_allow_failure_test_on_failure.yml (renamed from spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_succeeds_test_manual_allow_failure_true.yml (renamed from spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml) | 17
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_succeeds_test_on_failure.yml (renamed from spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml) | 0
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_build_test_manual_review_deploy.yml | 79
-rw-r--r--  spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_always.yml | 23
-rw-r--r--  spec/services/ci/retry_pipeline_service_spec.rb | 2
-rw-r--r--  spec/services/clusters/aws/authorize_role_service_spec.rb | 6
-rw-r--r--  spec/services/clusters/cleanup/project_namespace_service_spec.rb | 1
-rw-r--r--  spec/services/clusters/cleanup/service_account_service_spec.rb | 1
-rw-r--r--  spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb | 109
-rw-r--r--  spec/services/commits/commit_patch_service_spec.rb | 1
-rw-r--r--  spec/services/design_management/move_designs_service_spec.rb | 147
-rw-r--r--  spec/services/design_management/save_designs_service_spec.rb | 171
-rw-r--r--  spec/services/discussions/capture_diff_note_positions_service_spec.rb | 28
-rw-r--r--  spec/services/event_create_service_spec.rb | 77
-rw-r--r--  spec/services/git/base_hooks_service_spec.rb | 3
-rw-r--r--  spec/services/git/process_ref_changes_service_spec.rb | 14
-rw-r--r--  spec/services/git/wiki_push_service_spec.rb | 2
-rw-r--r--  spec/services/groups/group_links/create_service_spec.rb | 1
-rw-r--r--  spec/services/groups/transfer_service_spec.rb | 69
-rw-r--r--  spec/services/groups/update_service_spec.rb | 48
-rw-r--r--  spec/services/import/github_service_spec.rb | 69
-rw-r--r--  spec/services/incident_management/create_incident_label_service_spec.rb | 10
-rw-r--r--  spec/services/incident_management/create_issue_service_spec.rb | 239
-rw-r--r--  spec/services/incident_management/incidents/create_service_spec.rb | 69
-rw-r--r--  spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb | 99
-rw-r--r--  spec/services/incident_management/pager_duty/process_webhook_service_spec.rb | 127
-rw-r--r--  spec/services/issuable/clone/content_rewriter_spec.rb | 182
-rw-r--r--  spec/services/issues/build_service_spec.rb | 14
-rw-r--r--  spec/services/issues/close_service_spec.rb | 35
-rw-r--r--  spec/services/issues/move_service_spec.rb | 66
-rw-r--r--  spec/services/issues/update_service_spec.rb | 32
-rw-r--r--  spec/services/jira/requests/projects/list_service_spec.rb | 18
-rw-r--r--  spec/services/jira_import/cloud_users_mapper_service_spec.rb | 19
-rw-r--r--  spec/services/jira_import/server_users_mapper_service_spec.rb | 19
-rw-r--r--  spec/services/jira_import/users_importer_spec.rb | 117
-rw-r--r--  spec/services/jira_import/users_mapper_spec.rb | 43
-rw-r--r--  spec/services/labels/available_labels_service_spec.rb | 30
-rw-r--r--  spec/services/markdown_content_rewriter_service_spec.rb | 56
-rw-r--r--  spec/services/merge_requests/conflicts/resolve_service_spec.rb | 1
-rw-r--r--  spec/services/merge_requests/create_service_spec.rb | 1
-rw-r--r--  spec/services/merge_requests/ff_merge_service_spec.rb | 10
-rw-r--r--  spec/services/merge_requests/push_options_handler_service_spec.rb | 33
-rw-r--r--  spec/services/merge_requests/pushed_branches_service_spec.rb | 17
-rw-r--r--  spec/services/merge_requests/rebase_service_spec.rb | 1
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb | 3
-rw-r--r--  spec/services/merge_requests/squash_service_spec.rb | 1
-rw-r--r--  spec/services/metrics/dashboard/clone_dashboard_service_spec.rb | 6
-rw-r--r--  spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb | 1
-rw-r--r--  spec/services/metrics/dashboard/custom_dashboard_service_spec.rb | 20
-rw-r--r--  spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb | 3
-rw-r--r--  spec/services/metrics/dashboard/dynamic_embed_service_spec.rb | 2
-rw-r--r--  spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb | 19
-rw-r--r--  spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb | 41
-rw-r--r--  spec/services/metrics/dashboard/panel_preview_service_spec.rb | 83
-rw-r--r--  spec/services/metrics/dashboard/pod_dashboard_service_spec.rb | 16
-rw-r--r--  spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb | 2
-rw-r--r--  spec/services/notes/copy_service_spec.rb | 157
-rw-r--r--  spec/services/notes/create_service_spec.rb | 9
-rw-r--r--  spec/services/notes/quick_actions_service_spec.rb | 27
-rw-r--r--  spec/services/notes/update_service_spec.rb | 50
-rw-r--r--  spec/services/notification_service_spec.rb | 88
-rw-r--r--  spec/services/packages/create_dependency_service_spec.rb | 1
-rw-r--r--  spec/services/packages/maven/find_or_create_package_service_spec.rb | 85
-rw-r--r--  spec/services/packages/npm/create_package_service_spec.rb | 1
-rw-r--r--  spec/services/packages/pypi/create_package_service_spec.rb | 13
-rw-r--r--  spec/services/personal_access_tokens/revoke_service_spec.rb | 44
-rw-r--r--  spec/services/pod_logs/elasticsearch_service_spec.rb | 1
-rw-r--r--  spec/services/preview_markdown_service_spec.rb | 5
-rw-r--r--  spec/services/product_analytics/build_graph_service_spec.rb | 27
-rw-r--r--  spec/services/projects/alerting/notify_service_spec.rb | 18
-rw-r--r--  spec/services/projects/cleanup_service_spec.rb | 2
-rw-r--r--  spec/services/projects/container_repository/delete_tags_service_spec.rb | 293
-rw-r--r--  spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb | 56
-rw-r--r--  spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb | 89
-rw-r--r--  spec/services/projects/create_service_spec.rb | 6
-rw-r--r--  spec/services/projects/fork_service_spec.rb | 69
-rw-r--r--  spec/services/projects/operations/update_service_spec.rb | 1
-rw-r--r--  spec/services/projects/prometheus/alerts/notify_service_spec.rb | 56
-rw-r--r--  spec/services/projects/propagate_service_template_spec.rb | 4
-rw-r--r--  spec/services/projects/transfer_service_spec.rb | 33
-rw-r--r--  spec/services/projects/update_pages_configuration_service_spec.rb | 78
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 22
-rw-r--r--  spec/services/projects/update_remote_mirror_service_spec.rb | 62
-rw-r--r--  spec/services/projects/update_repository_storage_service_spec.rb | 27
-rw-r--r--  spec/services/projects/update_service_spec.rb | 62
-rw-r--r--  spec/services/releases/create_service_spec.rb | 1
-rw-r--r--  spec/services/resource_access_tokens/create_service_spec.rb | 11
-rw-r--r--  spec/services/resource_events/change_milestone_service_spec.rb | 10
-rw-r--r--  spec/services/search_service_spec.rb | 27
-rw-r--r--  spec/services/service_desk_settings/update_service_spec.rb | 11
-rw-r--r--  spec/services/snippets/create_service_spec.rb | 1
-rw-r--r--  spec/services/snippets/update_service_spec.rb | 1
-rw-r--r--  spec/services/spam/spam_action_service_spec.rb | 1
-rw-r--r--  spec/services/spam/spam_verdict_service_spec.rb | 1
-rw-r--r--  spec/services/submit_usage_ping_service_spec.rb | 123
-rw-r--r--  spec/services/suggestions/apply_service_spec.rb | 34
-rw-r--r--  spec/services/suggestions/create_service_spec.rb | 1
-rw-r--r--  spec/services/system_note_service_spec.rb | 2
-rw-r--r--  spec/services/system_notes/alert_management_service_spec.rb | 16
-rw-r--r--  spec/services/system_notes/design_management_service_spec.rb | 1
-rw-r--r--  spec/services/todo_service_spec.rb | 85
-rw-r--r--  spec/services/users/create_service_spec.rb | 1
-rw-r--r--  spec/services/users/refresh_authorized_projects_service_spec.rb | 20
-rw-r--r--  spec/services/web_hook_service_spec.rb | 10
-rw-r--r--  spec/services/wiki_pages/event_create_service_spec.rb | 3
-rw-r--r--  spec/services/wikis/create_attachment_service_spec.rb | 1
-rw-r--r--  spec/spec_helper.rb | 15
-rw-r--r--  spec/support/counter_attribute.rb | 14
-rw-r--r--  spec/support/csv_response.rb | 5
-rw-r--r--  spec/support/gitlab_stubs/gitlab_ci_for_sast.yml | 13
-rw-r--r--  spec/support/helpers/bare_repo_operations.rb | 2
-rw-r--r--  spec/support/helpers/cycle_analytics_helpers.rb | 2
-rw-r--r--  spec/support/helpers/design_management_test_helpers.rb | 4
-rw-r--r--  spec/support/helpers/filtered_search_helpers.rb | 2
-rw-r--r--  spec/support/helpers/http_basic_auth_helpers.rb | 17
-rw-r--r--  spec/support/helpers/jira_service_helper.rb | 2
-rw-r--r--  spec/support/helpers/login_helpers.rb | 2
-rw-r--r--  spec/support/helpers/memory_usage_helper.rb | 2
-rw-r--r--  spec/support/helpers/metrics_dashboard_helpers.rb | 18
-rw-r--r--  spec/support/helpers/metrics_dashboard_url_helpers.rb | 10
-rw-r--r--  spec/support/helpers/navbar_structure_helper.rb | 18
-rw-r--r--  spec/support/helpers/notification_helpers.rb | 4
-rw-r--r--  spec/support/helpers/packages_manager_api_spec_helper.rb | 12
-rw-r--r--  spec/support/helpers/require_migration.rb | 31
-rw-r--r--  spec/support/helpers/stub_configuration.rb | 4
-rw-r--r--  spec/support/helpers/stub_feature_flags.rb | 31
-rw-r--r--  spec/support/helpers/stub_object_storage.rb | 13
-rw-r--r--  spec/support/helpers/stubbed_feature.rb | 49
-rw-r--r--  spec/support/helpers/test_env.rb | 6
-rw-r--r--  spec/support/helpers/trigger_helpers.rb | 2
-rw-r--r--  spec/support/helpers/usage_data_helpers.rb | 1
-rw-r--r--  spec/support/helpers/wait_for_requests.rb | 6
-rw-r--r--  spec/support/matchers/exceed_query_limit.rb | 4
-rw-r--r--  spec/support/migrations_helpers/track_untracked_uploads_helpers.rb | 130
-rw-r--r--  spec/support/protected_branch_helpers.rb | 5
-rw-r--r--  spec/support/shared_contexts/change_access_checks_shared_context.rb | 2
-rw-r--r--  spec/support/shared_contexts/csv_response_shared_context.rb | 5
-rw-r--r--  spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb | 6
-rw-r--r--  spec/support/shared_contexts/lib/gitlab/git_access_shared_examples.rb | 17
-rw-r--r--  spec/support/shared_contexts/navbar_structure_context.rb | 3
-rw-r--r--  spec/support/shared_contexts/policies/group_policy_shared_context.rb | 3
-rw-r--r--  spec/support/shared_contexts/prometheus/alert_shared_context.rb | 2
-rw-r--r--  spec/support/shared_contexts/read_ci_configuration_shared_context.rb | 9
-rw-r--r--  spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb | 2
-rw-r--r--  spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb | 78
-rw-r--r--  spec/support/shared_examples/alert_notification_service_shared_examples.rb | 29
-rw-r--r--  spec/support/shared_examples/controllers/binary_blob_shared_examples.rb | 34
-rw-r--r--  spec/support/shared_examples/controllers/concerns/graceful_timeout_handling_shared_examples.rb | 7
-rw-r--r--  spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb | 83
-rw-r--r--  spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/controllers/variables_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb | 54
-rw-r--r--  spec/support/shared_examples/create_alert_issue_shared_examples.rb | 27
-rw-r--r--  spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb | 21
-rw-r--r--  spec/support/shared_examples/features/packages_shared_examples.rb | 113
-rw-r--r--  spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb | 8
-rw-r--r--  spec/support/shared_examples/features/rss_shared_examples.rb | 6
-rw-r--r--  spec/support/shared_examples/features/snippets_shared_examples.rb | 222
-rw-r--r--  spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb | 386
-rw-r--r--  spec/support/shared_examples/graphql/design_fields_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/graphql/mutations/resolves_subscription_shared_examples.rb | 45
-rw-r--r--  spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb | 126
-rw-r--r--  spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb | 11
-rw-r--r--  spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb | 22
-rw-r--r--  spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb | 22
-rw-r--r--  spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb | 160
-rw-r--r--  spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb | 49
-rw-r--r--  spec/support/shared_examples/models/chat_service_shared_examples.rb | 2
-rw-r--r--  spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb | 38
-rw-r--r--  spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb | 42
-rw-r--r--  spec/support/shared_examples/models/cluster_application_status_shared_examples.rb | 152
-rw-r--r--  spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb | 176
-rw-r--r--  spec/support/shared_examples/models/concerns/file_store_mounter_shared_examples.rb | 17
-rw-r--r--  spec/support/shared_examples/models/concerns/timebox_shared_examples.rb | 41
-rw-r--r--  spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/models/relative_positioning_shared_examples.rb | 602
-rw-r--r--  spec/support/shared_examples/models/resource_event_shared_examples.rb (renamed from spec/support/shared_examples/resource_events.rb) | 0
-rw-r--r--  spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb | 75
-rw-r--r--  spec/support/shared_examples/models/update_project_statistics_shared_examples.rb | 24
-rw-r--r--  spec/support/shared_examples/path_extraction_shared_examples.rb | 68
-rw-r--r--  spec/support/shared_examples/policies/project_policy_shared_examples.rb | 15
-rw-r--r--  spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb | 14
-rw-r--r--  spec/support/shared_examples/requests/api/graphql/mutations/subscription_shared_examples.rb | 69
-rw-r--r--  spec/support/shared_examples/requests/api/milestones_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/requests/api/notes_shared_examples.rb | 61
-rw-r--r--  spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb | 5
-rw-r--r--  spec/support/shared_examples/requests/api/packages_shared_examples.rb | 8
-rw-r--r--  spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb | 17
-rw-r--r--  spec/support/shared_examples/requests/snippet_shared_examples.rb | 77
-rw-r--r--  spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb | 22
-rw-r--r--  spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb | 18
-rw-r--r--  spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb | 39
-rw-r--r--  spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb | 8
-rw-r--r--  spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb | 41
-rw-r--r--  spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb | 52
-rw-r--r--  spec/support/shared_examples/services/wiki_pages/create_service_shared_examples.rb | 11
-rw-r--r--  spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb | 1
-rw-r--r--  spec/support/shared_examples/snippet_blob_shared_examples.rb | 21
-rw-r--r--  spec/support_specs/helpers/stub_feature_flags_spec.rb | 36
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb | 112
-rw-r--r--  spec/tasks/gitlab/packages/migrate_rake_spec.rb | 4
-rw-r--r--  spec/tooling/lib/tooling/helm3_client_spec.rb | 2
-rw-r--r--  spec/uploaders/ci/pipeline_artifact_uploader_spec.rb | 39
-rw-r--r--  spec/uploaders/file_mover_spec.rb | 1
-rw-r--r--  spec/uploaders/object_storage_spec.rb | 71
-rw-r--r--  spec/uploaders/packages/package_file_uploader_spec.rb | 4
-rw-r--r--  spec/validators/qualified_domain_array_validator_spec.rb | 1
-rw-r--r--  spec/views/admin/dashboard/index.html.haml_spec.rb | 9
-rw-r--r--  spec/views/layouts/_flash.html.haml_spec.rb | 30
-rw-r--r--  spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb | 52
-rw-r--r--  spec/views/notify/changed_milestone_email.html.haml_spec.rb | 2
-rw-r--r--  spec/views/profiles/preferences/show.html.haml_spec.rb | 41
-rw-r--r--  spec/views/projects/ci/lints/show.html.haml_spec.rb | 23
-rw-r--r--  spec/views/projects/commit/show.html.haml_spec.rb | 6
-rw-r--r--  spec/views/projects/imports/new.html.haml_spec.rb | 2
-rw-r--r--  spec/views/projects/merge_requests/show.html.haml_spec.rb | 18
-rw-r--r--  spec/views/projects/pipelines/new.html.haml_spec.rb | 34
-rw-r--r--  spec/views/projects/pipelines/show.html.haml_spec.rb | 77
-rw-r--r--  spec/views/projects/services/_form.haml_spec.rb | 4
-rw-r--r--  spec/views/projects/settings/operations/show.html.haml_spec.rb | 2
-rw-r--r--  spec/views/search/show.html.haml_spec.rb | 32
-rw-r--r--  spec/views/shared/_label_row.html.haml_spec.rb | 2
-rw-r--r--  spec/workers/deployments/finished_worker_spec.rb | 24
-rw-r--r--  spec/workers/flush_counter_increments_worker_spec.rb | 41
-rw-r--r--  spec/workers/git_garbage_collect_worker_spec.rb | 137
-rw-r--r--  spec/workers/gitlab/import/stuck_project_import_jobs_worker_spec.rb | 4
-rw-r--r--  spec/workers/gitlab/jira_import/import_issue_worker_spec.rb | 3
-rw-r--r--  spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb | 4
-rw-r--r--  spec/workers/gitlab_usage_ping_worker_spec.rb | 38
-rw-r--r--  spec/workers/incident_management/process_alert_worker_spec.rb | 14
-rw-r--r--  spec/workers/migrate_external_diffs_worker_spec.rb | 2
-rw-r--r--  spec/workers/namespaceless_project_destroy_worker_spec.rb | 2
-rw-r--r--  spec/workers/namespaces/root_statistics_worker_spec.rb | 4
-rw-r--r--  spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb | 2
-rw-r--r--  spec/workers/pages_domain_verification_worker_spec.rb | 2
-rw-r--r--  spec/workers/pages_update_configuration_worker_spec.rb | 53
-rw-r--r--  spec/workers/personal_access_tokens/expired_notification_worker_spec.rb | 69
-rw-r--r--  spec/workers/pipeline_process_worker_spec.rb | 11
-rw-r--r--  spec/workers/pipeline_update_worker_spec.rb | 32
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 4
-rw-r--r--  spec/workers/propagate_integration_worker_spec.rb | 2
-rw-r--r--  spec/workers/propagate_service_template_worker_spec.rb | 2
-rw-r--r--  spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb | 4
-rw-r--r--  spec/workers/repository_check/single_repository_worker_spec.rb | 2
-rw-r--r--  spec/workers/repository_cleanup_worker_spec.rb | 4
-rw-r--r--  spec/workers/repository_import_worker_spec.rb | 6
-rw-r--r--  spec/workers/repository_update_remote_mirror_worker_spec.rb | 2
-rw-r--r--  spec/workers/stuck_ci_jobs_worker_spec.rb | 2
-rw-r--r--  spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb | 2
-rw-r--r--  spec/workers/update_highest_role_worker_spec.rb | 1
1879 files changed, 50686 insertions, 20446 deletions
diff --git a/spec/bin/feature_flag_spec.rb b/spec/bin/feature_flag_spec.rb
index 3a315a13686..f85b8f22210 100644
--- a/spec/bin/feature_flag_spec.rb
+++ b/spec/bin/feature_flag_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'bin/feature-flag' do
using RSpec::Parameterized::TableSyntax
describe FeatureFlagCreator do
- let(:argv) { %w[feature-flag-name -t development -g group::memory -i https://url] }
+ let(:argv) { %w[feature-flag-name -t development -g group::memory -i https://url -m http://url] }
let(:options) { FeatureFlagOptionParser.parse(argv) }
let(:creator) { described_class.new(options) }
let(:existing_flag) { File.join('config', 'feature_flags', 'development', 'existing-feature-flag.yml') }
@@ -115,24 +115,45 @@ RSpec.describe 'bin/feature-flag' do
describe '.read_type' do
let(:type) { 'development' }
- it 'reads type from $stdin' do
- expect($stdin).to receive(:gets).and_return(type)
- expect do
+ context 'when there is only a single type defined' do
+ before do
+ stub_const('FeatureFlagOptionParser::TYPES',
+ development: { description: 'short' }
+ )
+ end
+
+ it 'returns that type' do
expect(described_class.read_type).to eq(:development)
- end.to output(/specify the type/).to_stdout
+ end
end
- context 'invalid type given' do
- let(:type) { 'invalid' }
+ context 'when there are many types defined' do
+ before do
+ stub_const('FeatureFlagOptionParser::TYPES',
+ development: { description: 'short' },
+ licensed: { description: 'licensed' }
+ )
+ end
- it 'shows error message and retries' do
+ it 'reads type from $stdin' do
expect($stdin).to receive(:gets).and_return(type)
- expect($stdin).to receive(:gets).and_raise('EOF')
-
expect do
- expect { described_class.read_type }.to raise_error(/EOF/)
+ expect(described_class.read_type).to eq(:development)
end.to output(/specify the type/).to_stdout
- .and output(/Invalid type specified/).to_stderr
+ end
+
+ context 'when invalid type is given' do
+ let(:type) { 'invalid' }
+
+ it 'shows error message and retries' do
+ expect($stdin).to receive(:gets).and_return(type)
+ expect($stdin).to receive(:gets).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_type }.to raise_error(/EOF/)
+ end.to output(/specify the type/).to_stdout
+ .and output(/Invalid type specified/).to_stderr
+ end
end
end
end
@@ -162,15 +183,51 @@ RSpec.describe 'bin/feature-flag' do
end
end
- describe '.rollout_issue_url' do
+ describe '.read_introduced_by_url' do
+ let(:url) { 'https://merge-request' }
+
+ it 'reads type from $stdin' do
+ expect($stdin).to receive(:gets).and_return(url)
+ expect do
+ expect(described_class.read_introduced_by_url).to eq('https://merge-request')
+ end.to output(/can you paste the URL here/).to_stdout
+ end
+
+ context 'empty URL given' do
+ let(:url) { '' }
+
+ it 'skips entry' do
+ expect($stdin).to receive(:gets).and_return(url)
+ expect do
+ expect(described_class.read_introduced_by_url).to be_nil
+ end.to output(/can you paste the URL here/).to_stdout
+ end
+ end
+
+ context 'invalid URL given' do
+ let(:url) { 'invalid' }
+
+ it 'shows error message and retries' do
+ expect($stdin).to receive(:gets).and_return(url)
+ expect($stdin).to receive(:gets).and_raise('EOF')
+
+ expect do
+ expect { described_class.read_introduced_by_url }.to raise_error(/EOF/)
+ end.to output(/can you paste the URL here/).to_stdout
+ .and output(/URL needs to start with/).to_stderr
+ end
+ end
+ end
+
+ describe '.read_rollout_issue_url' do
let(:options) { OpenStruct.new(name: 'foo', type: :development) }
let(:url) { 'https://issue' }
it 'reads type from $stdin' do
expect($stdin).to receive(:gets).and_return(url)
expect do
- expect(described_class.read_issue_url(options)).to eq('https://issue')
- end.to output(/Paste URL here/).to_stdout
+ expect(described_class.read_rollout_issue_url(options)).to eq('https://issue')
+ end.to output(/Paste URL of `rollout issue` here/).to_stdout
end
context 'invalid URL given' do
@@ -181,8 +238,8 @@ RSpec.describe 'bin/feature-flag' do
expect($stdin).to receive(:gets).and_raise('EOF')
expect do
- expect { described_class.read_issue_url(options) }.to raise_error(/EOF/)
- end.to output(/Paste URL here/).to_stdout
+ expect { described_class.read_rollout_issue_url(options) }.to raise_error(/EOF/)
+ end.to output(/Paste URL of `rollout issue` here/).to_stdout
.and output(/URL needs to start/).to_stderr
end
end
diff --git a/spec/config/mail_room_spec.rb b/spec/config/mail_room_spec.rb
index 206f2744281..289e18be0d7 100644
--- a/spec/config/mail_room_spec.rb
+++ b/spec/config/mail_room_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'mail_room.yml' do
cmd = "puts ERB.new(File.read(#{absolute_path(mailroom_config_path).inspect})).result"
output, status = Gitlab::Popen.popen(%W(ruby -rerb -e #{cmd}), absolute_path('config'), vars)
- raise "Error interpreting #{mailroom_config_path}: #{output}" unless status.zero?
+ raise "Error interpreting #{mailroom_config_path}: #{output}" unless status == 0
YAML.load(output)
end
diff --git a/spec/config/object_store_settings_spec.rb b/spec/config/object_store_settings_spec.rb
index 4a800261625..36938c74afa 100644
--- a/spec/config/object_store_settings_spec.rb
+++ b/spec/config/object_store_settings_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe ObjectStoreSettings do
'region' => 'us-east-1'
}
end
+
let(:config) do
{
'lfs' => { 'enabled' => true },
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 7b8528009d8..4f223811be8 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -105,22 +105,6 @@ RSpec.describe Admin::ApplicationSettingsController do
expect(ApplicationSetting.current.minimum_password_length).to eq(10)
end
- it 'updates namespace_storage_size_limit setting' do
- put :update, params: { application_setting: { namespace_storage_size_limit: '100' } }
-
- expect(response).to redirect_to(general_admin_application_settings_path)
- expect(response).to set_flash[:notice].to('Application settings saved successfully')
- expect(ApplicationSetting.current.namespace_storage_size_limit).to eq(100)
- end
-
- it 'does not accept an invalid namespace_storage_size_limit' do
- put :update, params: { application_setting: { namespace_storage_size_limit: '-100' } }
-
- expect(response).to render_template(:general)
- expect(assigns(:application_setting).errors[:namespace_storage_size_limit]).to be_present
- expect(ApplicationSetting.current.namespace_storage_size_limit).not_to eq(-100)
- end
-
it 'updates repository_storages_weighted setting' do
put :update, params: { application_setting: { repository_storages_weighted_default: 75 } }
@@ -158,23 +142,12 @@ RSpec.describe Admin::ApplicationSettingsController do
end
describe 'verify panel actions' do
- before do
- stub_feature_flags(instance_level_integrations: false)
- end
-
Admin::ApplicationSettingsController::VALID_SETTING_PANELS.each do |valid_action|
it_behaves_like 'renders correct panels' do
let(:action) { valid_action }
end
end
end
- end
-
- describe 'PATCH #integrations' do
- before do
- stub_feature_flags(instance_level_integrations: false)
- sign_in(admin)
- end
describe 'EKS integration' do
let(:application_setting) { ApplicationSetting.current }
@@ -188,7 +161,7 @@ RSpec.describe Admin::ApplicationSettingsController do
end
it 'updates EKS settings' do
- patch :integrations, params: { application_setting: settings_params }
+ put :update, params: { application_setting: settings_params }
expect(application_setting.eks_integration_enabled).to be_truthy
expect(application_setting.eks_account_id).to eq '123456789012'
@@ -202,7 +175,7 @@ RSpec.describe Admin::ApplicationSettingsController do
it 'does not update the secret key' do
application_setting.update!(eks_secret_access_key: 'dummy secret key')
- patch :integrations, params: { application_setting: settings_params }
+ put :update, params: { application_setting: settings_params }
expect(application_setting.reload.eks_secret_access_key).to eq 'dummy secret key'
end
diff --git a/spec/controllers/admin/integrations_controller_spec.rb b/spec/controllers/admin/integrations_controller_spec.rb
index 7e7b60db2dc..4a5d5ede728 100644
--- a/spec/controllers/admin/integrations_controller_spec.rb
+++ b/spec/controllers/admin/integrations_controller_spec.rb
@@ -10,16 +10,6 @@ RSpec.describe Admin::IntegrationsController do
end
describe '#edit' do
- context 'when instance_level_integrations not enabled' do
- it 'returns not_found' do
- stub_feature_flags(instance_level_integrations: false)
-
- get :edit, params: { id: Service.available_services_names.sample }
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
Service.available_services_names.each do |integration_name|
context "#{integration_name}" do
it 'successfully displays the template' do
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index aec629ba330..357044a144c 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -190,20 +190,10 @@ RSpec.describe ApplicationController do
expect(response).to redirect_to new_user_session_path
end
- context 'request format is unknown' do
- it 'redirects if unauthenticated' do
- get :index, format: 'unknown'
+ it 'redirects if unauthenticated and request format is unknown' do
+ get :index, format: 'unknown'
- expect(response).to redirect_to new_user_session_path
- end
-
- it 'returns a 401 if the feature flag is disabled' do
- stub_feature_flags(devise_redirect_unknown_formats: false)
-
- get :index, format: 'unknown'
-
- expect(response).to have_gitlab_http_status(:unauthorized)
- end
+ expect(response).to redirect_to new_user_session_path
end
end
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index 601b8d427e0..a7f3ab0089f 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -265,6 +265,7 @@ RSpec.describe Boards::IssuesController do
hash[:ids] = [issue2.id]
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 2 }
@@ -280,6 +281,7 @@ RSpec.describe Boards::IssuesController do
hash[:ids] = [issue2.id]
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 403 }
let(:expected_issue_count) { 1 }
@@ -296,6 +298,7 @@ RSpec.describe Boards::IssuesController do
hash[:move_before_id] = nil
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 4 }
@@ -317,6 +320,7 @@ RSpec.describe Boards::IssuesController do
hash[:move_before_id] = nil
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 3 }
@@ -332,6 +336,7 @@ RSpec.describe Boards::IssuesController do
hash[:move_before_id] = nil
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 4 }
@@ -350,6 +355,7 @@ RSpec.describe Boards::IssuesController do
hash[:move_after_id] = issue4.id
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 200 }
let(:expected_issue_count) { 5 }
@@ -365,6 +371,7 @@ RSpec.describe Boards::IssuesController do
hash[:ids] = (0..51).to_a
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 422 }
let(:expected_issue_count) { 1 }
@@ -380,6 +387,7 @@ RSpec.describe Boards::IssuesController do
hash[:ids] = 'foobar'
end
end
+
let(:requesting_user) { user }
let(:expected_status) { 400 }
let(:expected_issue_count) { 1 }
diff --git a/spec/controllers/concerns/checks_collaboration_spec.rb b/spec/controllers/concerns/checks_collaboration_spec.rb
index be8beff5dd6..7fcd190d71a 100644
--- a/spec/controllers/concerns/checks_collaboration_spec.rb
+++ b/spec/controllers/concerns/checks_collaboration_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe ChecksCollaboration do
it 'is true when the user can push to a branch of the project' do
fake_access = double('Gitlab::UserAccess')
expect(fake_access).to receive(:can_push_to_branch?).with('a-branch').and_return(true)
- expect(Gitlab::UserAccess).to receive(:new).with(user, project: project).and_return(fake_access)
+ expect(Gitlab::UserAccess).to receive(:new).with(user, container: project).and_return(fake_access)
expect(helper.can_collaborate_with_project?(project, ref: 'a-branch')).to be_truthy
end
diff --git a/spec/controllers/concerns/graceful_timeout_handling_spec.rb b/spec/controllers/concerns/graceful_timeout_handling_spec.rb
new file mode 100644
index 00000000000..cece36f06b2
--- /dev/null
+++ b/spec/controllers/concerns/graceful_timeout_handling_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GracefulTimeoutHandling, type: :controller do
+ controller(ApplicationController) do
+ include GracefulTimeoutHandling
+
+ skip_before_action :authenticate_user!
+
+ def index
+ raise ActiveRecord::QueryCanceled.new
+ end
+ end
+
+ context 'for json request' do
+ subject { get :index, format: :json }
+
+ it 'renders graceful error message' do
+ subject
+
+ expect(json_response['error']).to eq(_('There is too much data to calculate. Please change your selection.'))
+ expect(response.code).to eq '200'
+ end
+
+ it 'logs exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(kind_of(ActiveRecord::QueryCanceled))
+
+ subject
+ end
+ end
+
+ context 'for html request' do
+ subject { get :index, format: :html }
+
+ it 'has no effect' do
+ expect do
+ subject
+ end.to raise_error(ActiveRecord::QueryCanceled)
+ end
+ end
+end
diff --git a/spec/controllers/concerns/metrics_dashboard_spec.rb b/spec/controllers/concerns/metrics_dashboard_spec.rb
index f0c9874965e..8a4d8828aaa 100644
--- a/spec/controllers/concerns/metrics_dashboard_spec.rb
+++ b/spec/controllers/concerns/metrics_dashboard_spec.rb
@@ -165,13 +165,14 @@ RSpec.describe MetricsDashboard do
it 'adds starred dashboard information and sorts the list' do
all_dashboards = json_response['all_dashboards'].map { |dashboard| dashboard.slice('display_name', 'starred', 'user_starred_path') }
expected_response = [
- { "display_name" => "Default dashboard", "starred" => false, 'user_starred_path' => api_v4_projects_metrics_user_starred_dashboards_path(id: project.id, params: { dashboard_path: 'config/prometheus/common_metrics.yml' }) },
{ "display_name" => "anomaly.yml", "starred" => false, 'user_starred_path' => api_v4_projects_metrics_user_starred_dashboards_path(id: project.id, params: { dashboard_path: '.gitlab/dashboards/anomaly.yml' }) },
{ "display_name" => "errors.yml", "starred" => true, 'user_starred_path' => api_v4_projects_metrics_user_starred_dashboards_path(id: project.id, params: { dashboard_path: '.gitlab/dashboards/errors.yml' }) },
+ { "display_name" => "K8s pod health", "starred" => false, 'user_starred_path' => api_v4_projects_metrics_user_starred_dashboards_path(id: project.id, params: { dashboard_path: 'config/prometheus/pod_metrics.yml' }) },
+ { "display_name" => "Overview", "starred" => false, 'user_starred_path' => api_v4_projects_metrics_user_starred_dashboards_path(id: project.id, params: { dashboard_path: 'config/prometheus/common_metrics.yml' }) },
{ "display_name" => "test.yml", "starred" => true, 'user_starred_path' => api_v4_projects_metrics_user_starred_dashboards_path(id: project.id, params: { dashboard_path: '.gitlab/dashboards/test.yml' }) }
]
- expect(all_dashboards).to eql expected_response
+ expect(all_dashboards).to eq(expected_response)
end
end
end
diff --git a/spec/controllers/concerns/send_file_upload_spec.rb b/spec/controllers/concerns/send_file_upload_spec.rb
index 7cfaf1b248f..e24e4cbf5e7 100644
--- a/spec/controllers/concerns/send_file_upload_spec.rb
+++ b/spec/controllers/concerns/send_file_upload_spec.rb
@@ -21,6 +21,12 @@ RSpec.describe SendFileUpload do
let(:controller_class) do
Class.new do
include SendFileUpload
+
+ def params
+ {}
+ end
+
+ def current_user; end
end
end
@@ -42,6 +48,89 @@ RSpec.describe SendFileUpload do
FileUtils.rm_f(temp_file)
end
+ shared_examples 'handles image resize requests' do
+ let(:headers) { double }
+
+ before do
+ allow(uploader).to receive(:image?).and_return(true)
+ allow(uploader).to receive(:mounted_as).and_return(:avatar)
+
+ allow(controller).to receive(:headers).and_return(headers)
+ # both of these are valid cases, depending on whether we are dealing with
+ # local or remote files
+ allow(controller).to receive(:send_file)
+ allow(controller).to receive(:redirect_to)
+ end
+
+ context 'when feature is enabled for current user' do
+ let(:user) { build(:user) }
+
+ before do
+ stub_feature_flags(dynamic_image_resizing: user)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ context 'with valid width parameter' do
+ it 'renders OK with workhorse command header' do
+ expect(controller).not_to receive(:send_file)
+ expect(controller).to receive(:params).at_least(:once).and_return(width: '64')
+ expect(controller).to receive(:head).with(:ok)
+ expect(headers).to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-scaled-img:/)
+
+ subject
+ end
+ end
+
+ context 'with missing width parameter' do
+ it 'does not write workhorse command header' do
+ expect(headers).not_to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-scaled-img:/)
+
+ subject
+ end
+ end
+
+ context 'with invalid width parameter' do
+ it 'does not write workhorse command header' do
+ expect(controller).to receive(:params).at_least(:once).and_return(width: 'not a number')
+ expect(headers).not_to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-scaled-img:/)
+
+ subject
+ end
+ end
+
+ context 'with width that is not allowed' do
+ it 'does not write workhorse command header' do
+ expect(controller).to receive(:params).at_least(:once).and_return(width: '63')
+ expect(headers).not_to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-scaled-img:/)
+
+ subject
+ end
+ end
+
+ context 'when image file is not an avatar' do
+ it 'does not write workhorse command header' do
+ expect(uploader).to receive(:mounted_as).and_return(nil) # FileUploader is not mounted
+ expect(headers).not_to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-scaled-img:/)
+
+ subject
+ end
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(dynamic_image_resizing: false)
+ end
+
+ it 'does not write workhorse command header' do
+ expect(controller).to receive(:params).at_least(:once).and_return(width: '64')
+ expect(headers).not_to receive(:store).with(Gitlab::Workhorse::SEND_DATA_HEADER, /^send-scaled-img:/)
+
+ subject
+ end
+ end
+ end
+
context 'when local file is used' do
before do
uploader.store!(temp_file)
@@ -52,6 +141,8 @@ RSpec.describe SendFileUpload do
subject
end
+
+ it_behaves_like 'handles image resize requests'
end
context 'with inline image' do
@@ -155,6 +246,8 @@ RSpec.describe SendFileUpload do
it_behaves_like 'proxied file'
end
end
+
+ it_behaves_like 'handles image resize requests'
end
end
end
diff --git a/spec/controllers/dashboard/todos_controller_spec.rb b/spec/controllers/dashboard/todos_controller_spec.rb
index 2e3328ae4d2..f0aa351bee0 100644
--- a/spec/controllers/dashboard/todos_controller_spec.rb
+++ b/spec/controllers/dashboard/todos_controller_spec.rb
@@ -42,15 +42,6 @@ RSpec.describe Dashboard::TodosController do
expect(response).to have_gitlab_http_status(:ok)
end
-
- context 'tracking visits' do
- let_it_be(:authorized_project) { create(:project, :public) }
-
- it_behaves_like 'tracking unique visits', :index do
- let(:request_params) { { project_id: authorized_project.id } }
- let(:target_id) { 'u_analytics_todos' }
- end
- end
end
context "with render_views" do
diff --git a/spec/controllers/explore/projects_controller_spec.rb b/spec/controllers/explore/projects_controller_spec.rb
index fd86501ff5d..4ec890a528f 100644
--- a/spec/controllers/explore/projects_controller_spec.rb
+++ b/spec/controllers/explore/projects_controller_spec.rb
@@ -138,6 +138,33 @@ RSpec.describe Explore::ProjectsController do
end
end
+ shared_examples 'avoids N+1 queries' do
+ [:index, :trending, :starred].each do |endpoint|
+ describe "GET #{endpoint}" do
+ render_views
+
+ # some N+1 queries still exist
+ it 'avoids N+1 queries' do
+ projects = create_list(:project, 3, :repository, :public)
+ projects.each do |project|
+ pipeline = create(:ci_pipeline, :success, project: project, sha: project.commit.id)
+ create(:commit_status, :success, pipeline: pipeline, ref: pipeline.ref)
+ end
+
+ control = ActiveRecord::QueryRecorder.new { get endpoint }
+
+ new_projects = create_list(:project, 2, :repository, :public)
+ new_projects.each do |project|
+ pipeline = create(:ci_pipeline, :success, project: project, sha: project.commit.id)
+ create(:commit_status, :success, pipeline: pipeline, ref: pipeline.ref)
+ end
+
+ expect { get endpoint }.not_to exceed_query_limit(control).with_threshold(8)
+ end
+ end
+ end
+ end
+
context 'when user is signed in' do
let(:user) { create(:user) }
@@ -147,6 +174,7 @@ RSpec.describe Explore::ProjectsController do
include_examples 'explore projects'
include_examples "blocks high page numbers"
+ include_examples 'avoids N+1 queries'
context 'user preference sorting' do
let(:project) { create(:project) }
@@ -160,6 +188,7 @@ RSpec.describe Explore::ProjectsController do
context 'when user is not signed in' do
include_examples 'explore projects'
include_examples "blocks high page numbers"
+ include_examples 'avoids N+1 queries'
context 'user preference sorting' do
let(:project) { create(:project) }
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index e47bb75af22..5c7b88a218a 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -86,6 +86,7 @@ RSpec.describe Groups::MilestonesController do
let!(:public_project_with_private_issues_and_mrs) do
create(:project, :public, :issues_private, :merge_requests_private, group: public_group)
end
+
let!(:private_milestone) { create(:milestone, project: public_project_with_private_issues_and_mrs, title: 'project milestone') }
context 'when anonymous user' do
diff --git a/spec/controllers/groups/releases_controller_spec.rb b/spec/controllers/groups/releases_controller_spec.rb
new file mode 100644
index 00000000000..0925548f60a
--- /dev/null
+++ b/spec/controllers/groups/releases_controller_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::ReleasesController do
+ let(:group) { create(:group) }
+ let!(:project) { create(:project, :repository, :public, namespace: group) }
+ let!(:private_project) { create(:project, :repository, :private, namespace: group) }
+ let(:developer) { create(:user) }
+ let!(:release_1) { create(:release, project: project, tag: 'v1', released_at: Time.zone.parse('2020-02-15')) }
+ let!(:release_2) { create(:release, project: project, tag: 'v2', released_at: Time.zone.parse('2020-02-20')) }
+ let!(:private_release_1) { create(:release, project: private_project, tag: 'p1', released_at: Time.zone.parse('2020-03-01')) }
+ let!(:private_release_2) { create(:release, project: private_project, tag: 'p2', released_at: Time.zone.parse('2020-03-05')) }
+
+ before do
+ private_project.add_developer(developer)
+ end
+
+ describe 'GET #index' do
+ context 'as json' do
+ let(:format) { :json }
+
+ subject { get :index, params: { group_id: group }, format: format }
+
+ context 'json_response' do
+ before do
+ subject
+ end
+
+ it 'returns an application/json content_type' do
+ expect(response.content_type).to eq 'application/json'
+ end
+
+ it 'returns OK' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'the user is not authorized' do
+ before do
+ subject
+ end
+
+ it 'does not return any releases' do
+ expect(json_response.map {|r| r['tag'] } ).to match_array(%w(v2 v1))
+ end
+
+ it 'returns OK' do
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'the user is authorized' do
+ it "returns all group's public and private project's releases as JSON, ordered by released_at" do
+ sign_in(developer)
+
+ subject
+
+ expect(json_response.map {|r| r['tag'] } ).to match_array(%w(p2 p1 v2 v1))
+ end
+ end
+
+ context 'N+1 queries' do
+ it 'avoids N+1 database queries' do
+ control_count = ActiveRecord::QueryRecorder.new { subject }.count
+
+ create_list(:release, 5, project: project)
+ create_list(:release, 5, project: private_project)
+
+ expect { subject }.not_to exceed_query_limit(control_count)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/groups/settings/repository_controller_spec.rb b/spec/controllers/groups/settings/repository_controller_spec.rb
index 6d0caf6d655..14bbdc05282 100644
--- a/spec/controllers/groups/settings/repository_controller_spec.rb
+++ b/spec/controllers/groups/settings/repository_controller_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Groups::Settings::RepositoryController do
deploy_token_type: DeployToken.deploy_token_types[:group_type]
}
end
+
let(:request_params) do
{
group_id: group.to_param,
@@ -60,7 +61,7 @@ RSpec.describe Groups::Settings::RepositoryController do
'token' => be_a(String),
'scopes' => deploy_token_params.inject([]) do |scopes, kv|
key, value = kv
- key.to_s.start_with?('read_') && !value.to_i.zero? ? scopes << key.to_s : scopes
+ key.to_s.start_with?('read_') && value.to_i != 0 ? scopes << key.to_s : scopes
end
}
end
diff --git a/spec/controllers/groups/shared_projects_controller_spec.rb b/spec/controllers/groups/shared_projects_controller_spec.rb
index dafce094b14..528d5c073b7 100644
--- a/spec/controllers/groups/shared_projects_controller_spec.rb
+++ b/spec/controllers/groups/shared_projects_controller_spec.rb
@@ -17,9 +17,9 @@ RSpec.describe Groups::SharedProjectsController do
).execute(group)
end
- let_it_be(:group) { create(:group) }
- let_it_be(:user) { create(:user) }
- let_it_be(:shared_project) do
+ let!(:group) { create(:group) }
+ let!(:user) { create(:user) }
+ let!(:shared_project) do
shared_project = create(:project, namespace: user.namespace)
share_project(shared_project)
diff --git a/spec/controllers/help_controller_spec.rb b/spec/controllers/help_controller_spec.rb
index 6c0b3efa53b..3049396dd0f 100644
--- a/spec/controllers/help_controller_spec.rb
+++ b/spec/controllers/help_controller_spec.rb
@@ -159,15 +159,6 @@ RSpec.describe HelpController do
end
end
- describe 'GET #ui' do
- context 'for UI Development Kit' do
- it 'renders found' do
- get :ui
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
def stub_readme(content)
expect(File).to receive(:read).and_return(content)
end
diff --git a/spec/controllers/import/available_namespaces_controller_spec.rb b/spec/controllers/import/available_namespaces_controller_spec.rb
new file mode 100644
index 00000000000..ebccc862a13
--- /dev/null
+++ b/spec/controllers/import/available_namespaces_controller_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::AvailableNamespacesController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:manageable_groups) { [create(:group), create(:group)] }
+
+ before do
+ sign_in(user)
+ manageable_groups.each { |group| group.add_maintainer(user) }
+ end
+
+ describe "GET index" do
+ it "returns list of available namespaces" do
+ unrelated_group = create(:group)
+
+ get :index
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_kind_of(Array)
+
+ response_ids = json_response.map { |n| n["id"] }
+
+ expect(response_ids).not_to include(unrelated_group.id)
+ expect(response_ids).to contain_exactly(*manageable_groups.map(&:id))
+ end
+
+ context "with an anonymous user" do
+ before do
+ sign_out(user)
+ end
+
+ it "redirects to sign-in page" do
+ get :index
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index bb80de6425f..d5f94be65b6 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -139,8 +139,6 @@ RSpec.describe Import::BitbucketServerController do
describe 'GET status' do
render_views
- let(:repos) { instance_double(BitbucketServer::Collection) }
-
before do
allow(controller).to receive(:client).and_return(client)
@@ -157,14 +155,14 @@ RSpec.describe Import::BitbucketServerController do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['incompatible_repos'].length).to eq(1)
- expect(json_response.dig("incompatible_repos", 0, "id")).to eq(@invalid_repo.full_name)
+ expect(json_response.dig("incompatible_repos", 0, "id")).to eq("#{@invalid_repo.project_key}/#{@invalid_repo.slug}")
expect(json_response['provider_repos'].length).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(@repo.full_name)
end
it_behaves_like 'import controller status' do
let(:repo) { @repo }
- let(:repo_id) { @repo.full_name }
+ let(:repo_id) { "#{@repo.project_key}/#{@repo.slug}" }
let(:import_source) { @repo.browse_url }
let(:provider_name) { 'bitbucket_server' }
let(:client_repos_field) { :repos }
diff --git a/spec/controllers/import/gitea_controller_spec.rb b/spec/controllers/import/gitea_controller_spec.rb
index 9001faef408..3e4b159271a 100644
--- a/spec/controllers/import/gitea_controller_spec.rb
+++ b/spec/controllers/import/gitea_controller_spec.rb
@@ -34,6 +34,14 @@ RSpec.describe Import::GiteaController do
assign_host_url
end
+ it "requests provider repos list" do
+ expect(stub_client(repos: [], orgs: [])).to receive(:repos)
+
+ get :status
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
context 'when host url is local or not http' do
%w[https://localhost:3000 http://192.168.0.1 ftp://testing].each do |url|
let(:host_url) { url }
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index a5a3dc463d3..e19b6caca5b 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -15,10 +15,7 @@ RSpec.describe Import::GithubController do
it "redirects to GitHub for an access token if logged in with GitHub" do
allow(controller).to receive(:logged_in_with_provider?).and_return(true)
expect(controller).to receive(:go_to_provider_for_permissions).and_call_original
- allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
- .to receive(:authorize_url)
- .with(users_import_github_callback_url)
- .and_call_original
+ allow(controller).to receive(:authorize_url).and_call_original
get :new
@@ -46,13 +43,15 @@ RSpec.describe Import::GithubController do
end
describe "GET callback" do
+ before do
+ allow(controller).to receive(:get_token).and_return(token)
+ allow(controller).to receive(:oauth_options).and_return({})
+
+ stub_omniauth_provider('github')
+ end
+
it "updates access token" do
token = "asdasd12345"
- allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
- .to receive(:get_token).and_return(token)
- allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
- .to receive(:github_options).and_return({})
- stub_omniauth_provider('github')
get :callback
@@ -66,7 +65,86 @@ RSpec.describe Import::GithubController do
end
describe "GET status" do
- it_behaves_like 'a GitHub-ish import controller: GET status'
+ context 'when using OAuth' do
+ before do
+ allow(controller).to receive(:logged_in_with_provider?).and_return(true)
+ end
+
+ context 'when OAuth config is missing' do
+ let(:new_import_url) { public_send("new_import_#{provider}_url") }
+
+ before do
+ allow(controller).to receive(:oauth_config).and_return(nil)
+ end
+
+ it 'returns missing config error' do
+ expect(controller).to receive(:go_to_provider_for_permissions).and_call_original
+
+ get :status
+
+ expect(session[:"#{provider}_access_token"]).to be_nil
+ expect(controller).to redirect_to(new_import_url)
+ expect(flash[:alert]).to eq('Missing OAuth configuration for GitHub.')
+ end
+ end
+ end
+
+ context 'when feature remove_legacy_github_client is disabled' do
+ before do
+ stub_feature_flags(remove_legacy_github_client: false)
+ session[:"#{provider}_access_token"] = 'asdasd12345'
+ end
+
+ it_behaves_like 'a GitHub-ish import controller: GET status'
+
+ it 'uses Gitlab::LegacyGitHubImport::Client' do
+ expect(controller.send(:client)).to be_instance_of(Gitlab::LegacyGithubImport::Client)
+ end
+
+ it 'fetches repos using legacy client' do
+ expect_next_instance_of(Gitlab::LegacyGithubImport::Client) do |client|
+ expect(client).to receive(:repos)
+ end
+
+ get :status
+ end
+ end
+
+ context 'when feature remove_legacy_github_client is enabled' do
+ before do
+ stub_feature_flags(remove_legacy_github_client: true)
+ session[:"#{provider}_access_token"] = 'asdasd12345'
+ end
+
+ it_behaves_like 'a GitHub-ish import controller: GET status'
+
+ it 'uses Gitlab::GithubImport::Client' do
+ expect(controller.send(:client)).to be_instance_of(Gitlab::GithubImport::Client)
+ end
+
+ it 'fetches repos using latest github client' do
+ expect_next_instance_of(Gitlab::GithubImport::Client) do |client|
+ expect(client).to receive(:each_page).with(:repos).and_return([].to_enum)
+ end
+
+ get :status
+ end
+
+ it 'concatenates list of repos from multiple pages' do
+ repo_1 = OpenStruct.new(login: 'emacs', full_name: 'asd/emacs', name: 'emacs', owner: { login: 'owner' })
+ repo_2 = OpenStruct.new(login: 'vim', full_name: 'asd/vim', name: 'vim', owner: { login: 'owner' })
+ repos = [OpenStruct.new(objects: [repo_1]), OpenStruct.new(objects: [repo_2])].to_enum
+
+ allow(stub_client).to receive(:each_page).and_return(repos)
+
+ get :status, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig('provider_repos').count).to eq(2)
+ expect(json_response.dig('provider_repos', 0, 'id')).to eq(repo_1.id)
+ expect(json_response.dig('provider_repos', 1, 'id')).to eq(repo_2.id)
+ end
+ end
end
describe "POST create" do
diff --git a/spec/controllers/import/gitlab_controller_spec.rb b/spec/controllers/import/gitlab_controller_spec.rb
index 42c4348dac2..826625ba9c3 100644
--- a/spec/controllers/import/gitlab_controller_spec.rb
+++ b/spec/controllers/import/gitlab_controller_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe Import::GitlabController do
let(:gitlab_user) do
{ username: gitlab_username }.with_indifferent_access
end
+
let(:gitlab_repo) do
{
path: 'vim',
diff --git a/spec/controllers/import/manifest_controller_spec.rb b/spec/controllers/import/manifest_controller_spec.rb
new file mode 100644
index 00000000000..ec8bd45b65c
--- /dev/null
+++ b/spec/controllers/import/manifest_controller_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::ManifestController do
+ include ImportSpecHelper
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group)}
+
+ before(:all) do
+ group.add_maintainer(user)
+ end
+
+ before do
+ sign_in(user)
+ end
+
+ def assign_session_group
+ session[:manifest_import_repositories] = []
+ session[:manifest_import_group_id] = group.id
+ end
+
+ describe 'GET status' do
+ let(:repo1) { OpenStruct.new(id: 'test1', url: 'http://demo.host/test1') }
+ let(:repo2) { OpenStruct.new(id: 'test2', url: 'http://demo.host/test2') }
+ let(:repos) { [repo1, repo2] }
+
+ before do
+ assign_session_group
+
+ session[:manifest_import_repositories] = repos
+ end
+
+ it "returns variables for json request" do
+ project = create(:project, import_type: 'manifest', creator_id: user.id)
+
+ get :status, format: :json
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
+ expect(json_response.dig("provider_repos", 0, "id")).to eq(repo1.id)
+ expect(json_response.dig("provider_repos", 1, "id")).to eq(repo2.id)
+ expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
+ end
+
+ it "does not show already added project" do
+ project = create(:project, import_type: 'manifest', namespace: user.namespace, import_status: :finished, import_url: repo1.url)
+
+ get :status, format: :json
+
+ expect(json_response.dig("imported_projects", 0, "id")).to eq(project.id)
+ expect(json_response.dig("provider_repos").length).to eq(1)
+ expect(json_response.dig("provider_repos", 0, "id")).not_to eq(repo1.id)
+ end
+ end
+end
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index a9e4073780d..2b222331b55 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -34,25 +34,4 @@ RSpec.describe InvitesController do
expect(flash[:notice]).to be_nil
end
end
-
- describe 'POST #accept' do
- it 'accepts user' do
- expect do
- post :accept, params: { id: token }
- end.to change { project_members.include?(user) }.from(false).to(true)
-
- expect(response).to have_gitlab_http_status(:found)
- expect(flash[:notice]).to include 'You have been granted'
- end
- end
-
- describe 'GET #decline' do
- it 'declines user' do
- get :decline, params: { id: token }
-
- expect { member.reload }.to raise_error ActiveRecord::RecordNotFound
- expect(response).to have_gitlab_http_status(:found)
- expect(flash[:notice]).to include 'You have declined the invitation to join'
- end
- end
end
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index 0b99f28f79b..dce996b977d 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -181,6 +181,23 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
end
end
+ context 'when user with 2FA is unconfirmed' do
+ render_views
+
+ let(:user) { create(:omniauth_user, :two_factor, extern_uid: 'my-uid', provider: provider) }
+
+ before do
+ user.update_column(:confirmed_at, nil)
+ end
+
+ it 'redirects to login page' do
+ post provider
+
+ expect(response).to redirect_to(new_user_session_path)
+ expect(flash[:alert]).to match(/You have to confirm your email address before continuing./)
+ end
+ end
+
context 'sign up' do
include_context 'sign_up'
diff --git a/spec/controllers/projects/ci/daily_build_group_report_results_controller_spec.rb b/spec/controllers/projects/ci/daily_build_group_report_results_controller_spec.rb
index 252ad6ec9c4..594c24bb7e3 100644
--- a/spec/controllers/projects/ci/daily_build_group_report_results_controller_spec.rb
+++ b/spec/controllers/projects/ci/daily_build_group_report_results_controller_spec.rb
@@ -149,8 +149,4 @@ RSpec.describe Projects::Ci::DailyBuildGroupReportResultsController do
date: date
)
end
-
- def csv_response
- CSV.parse(response.body)
- end
end
diff --git a/spec/controllers/projects/ci/lints_controller_spec.rb b/spec/controllers/projects/ci/lints_controller_spec.rb
index eb92385fc83..b3e08292546 100644
--- a/spec/controllers/projects/ci/lints_controller_spec.rb
+++ b/spec/controllers/projects/ci/lints_controller_spec.rb
@@ -45,6 +45,9 @@ RSpec.describe Projects::Ci::LintsController do
end
describe 'POST #create' do
+ subject { post :create, params: params }
+
+ let(:params) { { namespace_id: project.namespace, project_id: project, content: content } }
let(:remote_file_path) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
let(:remote_file_content) do
@@ -72,18 +75,62 @@ RSpec.describe Projects::Ci::LintsController do
before do
stub_full_request(remote_file_path).to_return(body: remote_file_content)
project.add_developer(user)
+ end
- post :create, params: { namespace_id: project.namespace, project_id: project, content: content }
+ shared_examples 'returns a successful validation' do
+ it 'returns successfully' do
+ subject
+ expect(response).to be_successful
+ end
+
+ it 'renders the show page' do
+ subject
+ expect(response).to render_template :show
+ end
+
+ it 'retrieves project' do
+ subject
+ expect(assigns(:project)).to eq(project)
+ end
end
- it { expect(response).to be_successful }
+ context 'using legacy validation (YamlProcessor)' do
+ it_behaves_like 'returns a successful validation'
- it 'render show page' do
- expect(response).to render_template :show
+ it 'runs validations through YamlProcessor' do
+ expect(Gitlab::Ci::YamlProcessor).to receive(:new_with_validation_errors).and_call_original
+
+ subject
+ end
end
- it 'retrieves project' do
- expect(assigns(:project)).to eq(project)
+ context 'using dry_run mode' do
+ subject { post :create, params: params.merge(dry_run: 'true') }
+
+ it_behaves_like 'returns a successful validation'
+
+ it 'runs validations through Ci::CreatePipelineService' do
+ expect(Ci::CreatePipelineService)
+ .to receive(:new)
+ .with(project, user, ref: 'master')
+ .and_call_original
+
+ subject
+ end
+
+ context 'when dry_run feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_lint_creates_pipeline_with_dry_run: false)
+ end
+
+ it_behaves_like 'returns a successful validation'
+
+ it 'runs validations through YamlProcessor' do
+ expect(Gitlab::Ci::YamlProcessor).to receive(:new_with_validation_errors).and_call_original
+
+ subject
+ end
+ end
end
end
@@ -98,13 +145,23 @@ RSpec.describe Projects::Ci::LintsController do
before do
project.add_developer(user)
-
- post :create, params: { namespace_id: project.namespace, project_id: project, content: content }
end
it 'assigns errors' do
+ subject
+
expect(assigns[:errors]).to eq(['root config contains unknown keys: rubocop'])
end
+
+ context 'with dry_run mode' do
+ subject { post :create, params: params.merge(dry_run: 'true') }
+
+ it 'assigns errors' do
+ subject
+
+ expect(assigns[:errors]).to eq(['root config contains unknown keys: rubocop'])
+ end
+ end
end
context 'without enough privileges' do
diff --git a/spec/controllers/projects/cycle_analytics/events_controller_spec.rb b/spec/controllers/projects/cycle_analytics/events_controller_spec.rb
index 408ce51d34b..c5b72ff2b3b 100644
--- a/spec/controllers/projects/cycle_analytics/events_controller_spec.rb
+++ b/spec/controllers/projects/cycle_analytics/events_controller_spec.rb
@@ -57,6 +57,8 @@ RSpec.describe Projects::CycleAnalytics::EventsController do
end
end
+ include_examples GracefulTimeoutHandling
+
def get_issue(additional_params: {})
params = additional_params.merge(namespace_id: project.namespace, project_id: project)
get(:issue, params: params, format: :json)
diff --git a/spec/controllers/projects/cycle_analytics_controller_spec.rb b/spec/controllers/projects/cycle_analytics_controller_spec.rb
index ec853b74b9b..e956065972f 100644
--- a/spec/controllers/projects/cycle_analytics_controller_spec.rb
+++ b/spec/controllers/projects/cycle_analytics_controller_spec.rb
@@ -67,4 +67,6 @@ RSpec.describe Projects::CycleAnalyticsController do
end
end
end
+
+ include_examples GracefulTimeoutHandling
end
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 85ec1f7396d..d1142cbd129 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -348,34 +348,10 @@ RSpec.describe Projects::EnvironmentsController do
end
describe 'GET #metrics_redirect' do
- it 'redirects to environment if it exists' do
+ it 'redirects to metrics dashboard page' do
get :metrics_redirect, params: { namespace_id: project.namespace, project_id: project }
- expect(response).to redirect_to(environment_metrics_path(environment))
- end
-
- context 'with anonymous user and public dashboard visibility' do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
-
- it 'redirects successfully' do
- project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
-
- get :metrics_redirect, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to redirect_to(environment_metrics_path(environment))
- end
- end
-
- context 'when there are no environments' do
- let(:environment) { }
-
- it 'redirects to empty metrics page' do
- get :metrics_redirect, params: { namespace_id: project.namespace, project_id: project }
-
- expect(response).to be_ok
- expect(response).to render_template 'empty_metrics'
- end
+ expect(response).to redirect_to(project_metrics_dashboard_path(project))
end
end
@@ -385,12 +361,12 @@ RSpec.describe Projects::EnvironmentsController do
end
context 'when environment has no metrics' do
- it 'returns a metrics page' do
+ it 'redirects to metrics dashboard page' do
expect(environment).not_to receive(:metrics)
get :metrics, params: environment_params
- expect(response).to be_ok
+ expect(response).to redirect_to(project_metrics_dashboard_path(project, environment: environment))
end
context 'when requesting metrics as JSON' do
@@ -440,12 +416,12 @@ RSpec.describe Projects::EnvironmentsController do
let(:project) { create(:project, :public) }
let(:user) { create(:user) }
- it 'returns success' do
+ it 'redirects to metrics dashboard page' do
project.project_feature.update!(metrics_dashboard_access_level: ProjectFeature::ENABLED)
get :metrics, params: environment_params
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to redirect_to(project_metrics_dashboard_path(project, environment: environment))
end
end
end
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index 4c0fd7b8954..e8b30294cdd 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -162,9 +162,25 @@ RSpec.describe Projects::ForksController do
end
context 'when user is signed in' do
- it 'responds with status 200' do
+ before do
sign_in(user)
+ end
+
+ context 'when JSON requested' do
+ it 'responds with available groups' do
+ get :new,
+ format: :json,
+ params: {
+ namespace_id: project.namespace,
+ project_id: project
+ }
+
+ expect(json_response['namespaces'].length).to eq(1)
+ expect(json_response['namespaces'].first['id']).to eq(group.id)
+ end
+ end
+ it 'responds with status 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
@@ -229,6 +245,7 @@ RSpec.describe Projects::ForksController do
continue: continue_params
}
end
+
let(:continue_params) do
{
to: '/-/ide/project/path',
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index 440e6b2a74c..85d036486ee 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -36,7 +36,8 @@ RSpec.describe Projects::HooksController do
note_events: true,
job_events: true,
pipeline_events: true,
- wiki_page_events: true
+ wiki_page_events: true,
+ deployment_events: true
}
post :create, params: { namespace_id: project.namespace, project_id: project, hook: hook_params }
diff --git a/spec/controllers/projects/incidents_controller_spec.rb b/spec/controllers/projects/incidents_controller_spec.rb
new file mode 100644
index 00000000000..2baae0661cb
--- /dev/null
+++ b/spec/controllers/projects/incidents_controller_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::IncidentsController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+
+ before_all do
+ project.add_developer(developer)
+ project.add_guest(guest)
+ end
+
+ describe 'GET #index' do
+ def make_request
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+ end
+
+ it 'shows the page for user with developer role' do
+ sign_in(developer)
+ make_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+
+ context 'when user is unauthorized' do
+ it 'redirects to the login page' do
+ sign_out(developer)
+ make_request
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'when user is a guest' do
+ it 'shows 404' do
+ sign_in(guest)
+ make_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index f9580c79390..a0e478ef368 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -181,10 +181,11 @@ RSpec.describe Projects::IssuesController do
project.add_developer(user)
end
- it 'builds a new issue' do
+ it 'builds a new issue', :aggregate_failures do
get :new, params: { namespace_id: project.namespace, project_id: project }
expect(assigns(:issue)).to be_a_new(Issue)
+ expect(assigns(:issue).issue_type).to eq('issue')
end
where(:conf_value, :conf_result) do
@@ -214,6 +215,24 @@ RSpec.describe Projects::IssuesController do
end
end
+ context 'setting issue type' do
+ let(:issue_type) { 'issue' }
+
+ before do
+ get :new, params: { namespace_id: project.namespace, project_id: project, issue: { issue_type: issue_type } }
+ end
+
+ subject { assigns(:issue).issue_type }
+
+ it { is_expected.to eq('issue') }
+
+ context 'incident issue' do
+ let(:issue_type) { 'incident' }
+
+ it { is_expected.to eq(issue_type) }
+ end
+ end
+
it 'fills in an issue for a merge request' do
project_with_repository = create(:project, :repository)
project_with_repository.add_developer(user)
@@ -964,6 +983,33 @@ RSpec.describe Projects::IssuesController do
expect { issue.update(description: [issue.description, labels].join(' ')) }
.not_to exceed_query_limit(control_count + 2 * labels.count)
end
+
+ context 'real-time sidebar feature flag' do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ where(:action_cable_in_app_enabled, :feature_flag_enabled, :gon_feature_flag) do
+ true | true | true
+ true | false | true
+ false | true | true
+ false | false | false
+ end
+
+ with_them do
+ before do
+ expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(action_cable_in_app_enabled)
+ stub_feature_flags(real_time_issue_sidebar: feature_flag_enabled)
+ end
+
+ it 'broadcasts to the issues channel based on ActionCable and feature flag values' do
+ go(id: issue.to_param)
+
+ expect(Gon.features).to include('realTimeIssueSidebar' => gon_feature_flag)
+ end
+ end
+ end
end
describe 'GET #realtime_changes' do
@@ -1022,6 +1068,14 @@ RSpec.describe Projects::IssuesController do
project.issues.first
end
+ it 'creates the issue successfully', :aggregate_failures do
+ issue = post_new_issue
+
+ expect(issue).to be_a(Issue)
+ expect(issue.persisted?).to eq(true)
+ expect(issue.issue_type).to eq('issue')
+ end
+
context 'resolving discussions in MergeRequest' do
let(:discussion) { create(:diff_note_on_merge_request).to_discussion }
let(:merge_request) { discussion.noteable }
@@ -1262,6 +1316,20 @@ RSpec.describe Projects::IssuesController do
end
end
end
+
+ context 'setting issue type' do
+ let(:issue_type) { 'issue' }
+
+ subject { post_new_issue(issue_type: issue_type)&.issue_type }
+
+ it { is_expected.to eq('issue') }
+
+ context 'incident issue' do
+ let(:issue_type) { 'incident' }
+
+ it { is_expected.to eq(issue_type) }
+ end
+ end
end
describe 'POST #mark_as_spam' do
diff --git a/spec/controllers/projects/logs_controller_spec.rb b/spec/controllers/projects/logs_controller_spec.rb
index 0f34e536064..d5c602df41d 100644
--- a/spec/controllers/projects/logs_controller_spec.rb
+++ b/spec/controllers/projects/logs_controller_spec.rb
@@ -22,8 +22,8 @@ RSpec.describe Projects::LogsController do
describe 'GET #index' do
let(:empty_project) { create(:project) }
- it 'returns 404 with developer access' do
- project.add_developer(user)
+ it 'returns 404 with reporter access' do
+ project.add_reporter(user)
get :index, params: environment_params
@@ -31,7 +31,7 @@ RSpec.describe Projects::LogsController do
end
it 'renders empty logs page if no environment exists' do
- empty_project.add_maintainer(user)
+ empty_project.add_developer(user)
get :index, params: { namespace_id: empty_project.namespace, project_id: empty_project }
@@ -40,7 +40,7 @@ RSpec.describe Projects::LogsController do
end
it 'renders index template' do
- project.add_maintainer(user)
+ project.add_developer(user)
get :index, params: environment_params
@@ -59,6 +59,7 @@ RSpec.describe Projects::LogsController do
container_name: container
}
end
+
let(:service_result_json) { Gitlab::Json.parse(service_result.to_json) }
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*', projects: [project]) }
@@ -69,14 +70,27 @@ RSpec.describe Projects::LogsController do
end
end
- it 'returns 404 with developer access' do
- project.add_developer(user)
+ it 'returns 404 with reporter access' do
+ project.add_reporter(user)
get endpoint, params: environment_params(pod_name: pod_name, format: :json)
expect(response).to have_gitlab_http_status(:not_found)
end
+ context 'with developer access' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns the service result' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(service_result_json)
+ end
+ end
+
context 'with maintainer access' do
before do
project.add_maintainer(user)
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 4327e0bbb7a..8e1b250cd3c 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -334,7 +334,7 @@ RSpec.describe Projects::MergeRequestsController do
it 'closes MR without errors' do
update_merge_request(state_event: 'close')
- expect(response).to redirect_to([merge_request.target_project.namespace.becomes(Namespace), merge_request.target_project, merge_request])
+ expect(response).to redirect_to([merge_request.target_project, merge_request])
expect(merge_request.reload.closed?).to be_truthy
end
@@ -343,7 +343,7 @@ RSpec.describe Projects::MergeRequestsController do
update_merge_request(title: 'New title')
- expect(response).to redirect_to([merge_request.target_project.namespace.becomes(Namespace), merge_request.target_project, merge_request])
+ expect(response).to redirect_to([merge_request.target_project, merge_request])
expect(merge_request.reload.title).to eq 'New title'
end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 9728fad417e..570d65dba4f 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -302,6 +302,7 @@ RSpec.describe Projects::NotesController do
target_id: merge_request.id
}.merge(extra_request_params)
end
+
let(:extra_request_params) { {} }
let(:project_visibility) { Gitlab::VisibilityLevel::PUBLIC }
diff --git a/spec/controllers/projects/pipelines/tests_controller_spec.rb b/spec/controllers/projects/pipelines/tests_controller_spec.rb
index e2abd1238c5..61118487e20 100644
--- a/spec/controllers/projects/pipelines/tests_controller_spec.rb
+++ b/spec/controllers/projects/pipelines/tests_controller_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Projects::Pipelines::TestsController do
get_tests_summary_json
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(2)
+ expect(json_response.dig('total', 'count')).to eq(2)
end
end
@@ -28,20 +28,7 @@ RSpec.describe Projects::Pipelines::TestsController do
get_tests_summary_json
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(0)
- end
- end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(build_report_summary: false)
- end
-
- it 'returns 404' do
- get_tests_summary_json
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to be_empty
+ expect(json_response.dig('total', 'count')).to eq(0)
end
end
end
@@ -71,21 +58,6 @@ RSpec.describe Projects::Pipelines::TestsController do
expect(response.body).to be_empty
end
end
-
- context 'when feature is disabled' do
- let(:suite_name) { 'test' }
-
- before do
- stub_feature_flags(build_report_summary: false)
- end
-
- it 'returns 404' do
- get_tests_show_json([])
-
- expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to be_empty
- end
- end
end
def get_tests_summary_json
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 872f0e97b09..ef560f6426b 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -57,27 +57,6 @@ RSpec.describe Projects::PipelinesController do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['pipelines'].count).to eq 12
end
-
- context 'with build_report_summary turned off' do
- before do
- stub_feature_flags(build_report_summary: false)
- end
-
- it 'does not execute N+1 queries' do
- get_pipelines_index_json
-
- control_count = ActiveRecord::QueryRecorder.new do
- get_pipelines_index_json
- end.count
-
- create_all_pipeline_types
-
- # There appears to be one extra query for Pipelines#has_warnings? for some reason
- expect { get_pipelines_index_json }.not_to exceed_query_limit(control_count + 1)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['pipelines'].count).to eq 12
- end
- end
end
it 'does not include coverage data for the pipelines' do
@@ -880,113 +859,88 @@ RSpec.describe Projects::PipelinesController do
end
end
- context 'when feature is enabled' do
- before do
- stub_feature_flags(junit_pipeline_view: project)
- end
-
- context 'when pipeline does not have a test report' do
- it 'renders an empty test report' do
- get_test_report_json
+ context 'when pipeline does not have a test report' do
+ it 'renders an empty test report' do
+ get_test_report_json
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(0)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(0)
end
+ end
- context 'when pipeline has a test report' do
- before do
- create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
- create(:ci_job_artifact, :junit, job: build)
- end
- end
-
- it 'renders the test report' do
- get_test_report_json
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(4)
- end
+ context 'when pipeline has a test report' do
+ before do
+ create(:ci_build, :test_reports, name: 'rspec', pipeline: pipeline)
end
- context 'when pipeline has a corrupt test report artifact' do
- before do
- create(:ci_build, name: 'rspec', pipeline: pipeline).tap do |build|
- create(:ci_job_artifact, :junit_with_corrupted_data, job: build)
- end
+ it 'renders the test report' do
+ get_test_report_json
- get_test_report_json
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(4)
+ end
+ end
- it 'renders the test reports' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['test_suites'].count).to eq(1)
- end
+ context 'when pipeline has a corrupt test report artifact' do
+ before do
+ create(:ci_build, :broken_test_reports, name: 'rspec', pipeline: pipeline)
- it 'returns a suite_error on the suite with corrupted XML' do
- expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
- end
+ get_test_report_json
end
- context 'when junit_pipeline_screenshots_view is enabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: project)
- end
-
- context 'when test_report contains attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
+ it 'renders the test reports' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['test_suites'].count).to eq(1)
+ end
- it 'returns a test reports with attachment' do
- get_test_report_json(scope: 'with_attachment')
+ it 'returns a suite_error on the suite with corrupted XML' do
+ expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
+ end
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"]).to be_present
- expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
- end
- end
+ context 'when junit_pipeline_screenshots_view is enabled' do
+ before do
+ stub_feature_flags(junit_pipeline_screenshots_view: project)
+ end
- context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+ context 'when test_report contains attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
- it 'returns a test reports with empty values' do
- get_test_report_json(scope: 'with_attachment')
+ it 'returns test reports with an attachment' do
+ get_test_report_json(scope: 'with_attachment')
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"]).to be_empty
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"]).to be_present
+ expect(json_response["test_suites"].first["test_cases"].first).to include("attachment_url")
end
end
- context 'when junit_pipeline_screenshots_view is disabled' do
- before do
- stub_feature_flags(junit_pipeline_screenshots_view: false)
- end
-
- context 'when test_report contains attachment and scope is with_attachment as a URL param' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
+ context 'when test_report does not contain attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
- it 'returns a test reports without attachment_url' do
- get_test_report_json(scope: 'with_attachment')
+ it 'returns test reports with empty values' do
+ get_test_report_json(scope: 'with_attachment')
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response["test_suites"].first["test_cases"].first).not_to include("attachment_url")
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"]).to be_empty
end
end
end
- context 'when feature is disabled' do
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
-
+ context 'when junit_pipeline_screenshots_view is disabled' do
before do
- stub_feature_flags(junit_pipeline_view: false)
+ stub_feature_flags(junit_pipeline_screenshots_view: false)
end
- it 'renders empty response' do
- get_test_report_json
+ context 'when test_report contains attachment and scope is with_attachment as a URL param' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports_attachment, project: project) }
+
+ it 'returns test reports without attachment_url' do
+ get_test_report_json(scope: 'with_attachment')
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.body).to be_empty
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response["test_suites"].first["test_cases"].first).not_to include("attachment_url")
+ end
end
end
@@ -1010,76 +964,6 @@ RSpec.describe Projects::PipelinesController do
end
end
- describe 'GET test_report_count.json' do
- subject(:test_reports_count_json) do
- get :test_reports_count, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: pipeline.id
- },
- format: :json
- end
-
- context 'when feature is enabled' do
- before do
- stub_feature_flags(junit_pipeline_view: true)
- end
-
- context 'when pipeline does not have a test report' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- it 'renders an empty badge counter' do
- test_reports_count_json
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(0)
- end
- end
-
- context 'when pipeline has a test report' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
-
- it 'renders the badge counter value' do
- test_reports_count_json
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(4)
- end
- end
-
- context 'when pipeline has corrupt test reports' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- before do
- job = create(:ci_build, pipeline: pipeline)
- create(:ci_job_artifact, :junit_with_corrupted_data, job: job, project: project)
- end
-
- it 'renders 0' do
- test_reports_count_json
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(0)
- end
- end
- end
-
- context 'when feature is disabled' do
- let(:pipeline) { create(:ci_empty_pipeline, project: project) }
-
- before do
- stub_feature_flags(junit_pipeline_view: false)
- end
-
- it 'renders empty response' do
- test_reports_count_json
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.body).to be_empty
- end
- end
- end
-
describe 'GET latest' do
let(:branch_main) { project.repository.branches[0] }
let(:branch_secondary) { project.repository.branches[1] }
diff --git a/spec/controllers/projects/product_analytics_controller_spec.rb b/spec/controllers/projects/product_analytics_controller_spec.rb
new file mode 100644
index 00000000000..47f1d96c70b
--- /dev/null
+++ b/spec/controllers/projects/product_analytics_controller_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ProductAnalyticsController do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before(:all) do
+ project.add_maintainer(user)
+ end
+
+ before do
+ sign_in(user)
+ stub_feature_flags(product_analytics: true)
+ end
+
+ describe 'GET #index' do
+ it 'renders index with 200 status code' do
+ get :index, params: project_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:index)
+ end
+
+ context 'with an anonymous user' do
+ before do
+ sign_out(user)
+ end
+
+ it 'redirects to sign-in page' do
+ get :index, params: project_params
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'feature flag disabled' do
+ before do
+ stub_feature_flags(product_analytics: false)
+ end
+
+ it 'returns not found' do
+ get :index, params: project_params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET #test' do
+ it 'renders test with 200 status code' do
+ get :test, params: project_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:test)
+ end
+ end
+
+ describe 'GET #setup' do
+ it 'renders setup with 200 status code' do
+ get :setup, params: project_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:setup)
+ end
+ end
+
+ describe 'GET #graphs' do
+ it 'renders graphs with 200 status code' do
+ get :graphs, params: project_params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:graphs)
+ end
+
+ context 'feature flag disabled' do
+ before do
+ stub_feature_flags(product_analytics: false)
+ end
+
+ it 'returns not found' do
+ get :graphs, params: project_params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ private
+
+ def project_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace, project_id: project)
+ end
+end
diff --git a/spec/controllers/projects/prometheus/alerts_controller_spec.rb b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
index 6e3148231bd..cbd599506df 100644
--- a/spec/controllers/projects/prometheus/alerts_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
@@ -111,6 +111,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
describe 'GET #show' do
let(:alert) do
create(:prometheus_alert,
+ :with_runbook_url,
project: project,
environment: environment,
prometheus_metric: metric)
@@ -140,6 +141,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
'query' => alert.query,
'operator' => alert.computed_operator,
'threshold' => alert.threshold,
+ 'runbook_url' => alert.runbook_url,
'alert_path' => alert_path(alert)
}
end
@@ -225,7 +227,8 @@ RSpec.describe Projects::Prometheus::AlertsController do
'title' => metric.title,
'query' => metric.query,
'operator' => '>',
- 'threshold' => 1.0
+ 'threshold' => 1.0,
+ 'runbook_url' => 'https://sample.runbook.com'
}
end
@@ -234,6 +237,7 @@ RSpec.describe Projects::Prometheus::AlertsController do
opts,
operator: '>',
threshold: '1',
+ runbook_url: 'https://sample.runbook.com',
environment_id: environment,
prometheus_metric_id: metric
)
@@ -250,14 +254,14 @@ RSpec.describe Projects::Prometheus::AlertsController do
expect(json_response).to include(alert_params)
end
- it 'returns no_content for an invalid metric' do
+ it 'returns bad_request for an invalid metric' do
make_request(prometheus_metric_id: 'invalid')
- expect(response).to have_gitlab_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:bad_request)
end
it_behaves_like 'unprivileged'
- it_behaves_like 'project non-specific environment', :no_content
+ it_behaves_like 'project non-specific environment', :bad_request
end
describe 'PUT #update' do
@@ -304,6 +308,12 @@ RSpec.describe Projects::Prometheus::AlertsController do
expect(json_response).to include(alert_params)
end
+ it 'returns bad_request for invalid alert data' do
+ make_request(runbook_url: 'bad-url')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
it_behaves_like 'unprivileged'
it_behaves_like 'project non-specific environment', :not_found
it_behaves_like 'project non-specific metric', :not_found
diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb
index 09eb1a45c73..a0cb5c1473a 100644
--- a/spec/controllers/projects/protected_branches_controller_spec.rb
+++ b/spec/controllers/projects/protected_branches_controller_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe Projects::ProtectedBranchesController do
{ merge_access_levels_attributes: maintainer_access_level,
push_access_levels_attributes: maintainer_access_level }
end
+
let(:create_params) { attributes_for(:protected_branch).merge(access_level_params) }
before do
diff --git a/spec/controllers/projects/serverless/functions_controller_spec.rb b/spec/controllers/projects/serverless/functions_controller_spec.rb
index 3071d0b7f54..7f558ad9231 100644
--- a/spec/controllers/projects/serverless/functions_controller_spec.rb
+++ b/spec/controllers/projects/serverless/functions_controller_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Projects::Serverless::FunctionsController do
let(:knative_stub_options) do
{ namespace: namespace.namespace, name: function_name, description: function_description }
end
+
let(:knative) { create(:clusters_applications_knative, :installed, cluster: cluster) }
let(:namespace) do
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index e8a23dcfafb..50f474c0222 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe Projects::ServicesController do
'active' => '1',
'push_events' => '1',
'token' => 'token',
- 'project_url' => 'http://test.com'
+ 'project_url' => 'https://buildkite.com/organization/pipeline'
}
end
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index d4f3c5d0c9b..191b718af56 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -206,7 +206,7 @@ RSpec.describe Projects::Settings::OperationsController do
reset_pagerduty_token
new_token = incident_management_setting.reload.pagerduty_token
- new_webhook_url = project_incidents_pagerduty_url(project, token: new_token)
+ new_webhook_url = project_incidents_integrations_pagerduty_url(project, token: new_token)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['pagerduty_webhook_url']).to eq(new_webhook_url)
@@ -219,7 +219,7 @@ RSpec.describe Projects::Settings::OperationsController do
it 'does not reset a token' do
reset_pagerduty_token
- new_webhook_url = project_incidents_pagerduty_url(project, token: nil)
+ new_webhook_url = project_incidents_integrations_pagerduty_url(project, token: nil)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['pagerduty_webhook_url']).to eq(new_webhook_url)
diff --git a/spec/controllers/projects/settings/repository_controller_spec.rb b/spec/controllers/projects/settings/repository_controller_spec.rb
index 46dba691bc4..d93f23ae142 100644
--- a/spec/controllers/projects/settings/repository_controller_spec.rb
+++ b/spec/controllers/projects/settings/repository_controller_spec.rb
@@ -56,6 +56,7 @@ RSpec.describe Projects::Settings::RepositoryController do
deploy_token_type: DeployToken.deploy_token_types[:project_type]
}
end
+
let(:request_params) do
{
namespace_id: project.namespace.to_param,
@@ -77,7 +78,7 @@ RSpec.describe Projects::Settings::RepositoryController do
'token' => be_a(String),
'scopes' => deploy_token_params.inject([]) do |scopes, kv|
key, value = kv
- key.to_s.start_with?('read_') && !value.to_i.zero? ? scopes << key.to_s : scopes
+ key.to_s.start_with?('read_') && value.to_i != 0 ? scopes << key.to_s : scopes
end
}
end
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index 6fcb24da3cd..bb9b556f442 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -416,12 +416,13 @@ RSpec.describe Projects::SnippetsController do
describe "GET #show for embeddable content" do
let(:project_snippet) { create(:project_snippet, :repository, snippet_permission, project: project, author: user) }
+ let(:extra_params) { {} }
before do
sign_in(user)
end
- subject { get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }, format: :js }
+ subject { get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param, **extra_params }, format: :js }
context 'when snippet is private' do
let(:snippet_permission) { :private }
@@ -436,7 +437,29 @@ RSpec.describe Projects::SnippetsController do
context 'when snippet is public' do
let(:snippet_permission) { :public }
- it_behaves_like 'successful response'
+ it 'renders the blob from the repository' do
+ subject
+
+ expect(assigns(:snippet)).to eq(project_snippet)
+ expect(assigns(:blobs)).to eq(project_snippet.blobs)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'does not show the blobs expanded by default' do
+ subject
+
+ expect(project_snippet.blobs.map(&:expanded?)).to be_all(false)
+ end
+
+ context 'when param expanded is set' do
+ let(:extra_params) { { expanded: true } }
+
+ it 'shows all blobs expanded' do
+ subject
+
+ expect(project_snippet.blobs.map(&:expanded?)).to be_all(true)
+ end
+ end
end
context 'when the project is private' do
diff --git a/spec/controllers/projects/tags_controller_spec.rb b/spec/controllers/projects/tags_controller_spec.rb
index 122d1b072d0..d213d003bed 100644
--- a/spec/controllers/projects/tags_controller_spec.rb
+++ b/spec/controllers/projects/tags_controller_spec.rb
@@ -120,10 +120,14 @@ RSpec.describe Projects::TagsController do
request
- release = project.releases.find_by_tag!('1.0')
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:found)
- expect(release).to be_present
- expect(release.description).to eq(release_description)
+ release = project.releases.find_by_tag('1.0')
+
+ expect(release).to be_present
+ expect(release&.description).to eq(release_description)
+ end
end
end
end
diff --git a/spec/controllers/projects/variables_controller_spec.rb b/spec/controllers/projects/variables_controller_spec.rb
index 8bb4c2dae4b..768d2ec00bd 100644
--- a/spec/controllers/projects/variables_controller_spec.rb
+++ b/spec/controllers/projects/variables_controller_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Projects::VariablesController do
protected: variable.protected?.to_s,
environment_scope: variable.environment_scope }
end
+
let(:new_variable_attributes) do
{ key: 'new_key',
secret_value: 'dummy_value',
diff --git a/spec/controllers/registrations/experience_levels_controller_spec.rb b/spec/controllers/registrations/experience_levels_controller_spec.rb
index 5a217a3a684..cd46cec1641 100644
--- a/spec/controllers/registrations/experience_levels_controller_spec.rb
+++ b/spec/controllers/registrations/experience_levels_controller_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Registrations::ExperienceLevelsController do
end
it { is_expected.to have_gitlab_http_status(:ok) }
+ it { is_expected.to render_template('layouts/devise_experimental_onboarding_issues') }
it { is_expected.to render_template(:show) }
context 'when not part of the onboarding issues experiment' do
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 66caa58666f..2c766035d87 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -18,16 +18,6 @@ RSpec.describe RegistrationsController do
stub_experiment_for_user(signup_flow: true)
end
- it 'tracks the event with the right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::SignUpFlow',
- 'start',
- label: anything,
- property: 'experimental_group'
- )
- subject
- end
-
it 'renders new template and sets the resource variable' do
expect(subject).to render_template(:new)
expect(response).to have_gitlab_http_status(:ok)
@@ -41,17 +31,52 @@ RSpec.describe RegistrationsController do
stub_experiment_for_user(signup_flow: false)
end
- it 'does not track the event' do
- expect(Gitlab::Tracking).not_to receive(:event)
- subject
- end
-
it 'renders new template and sets the resource variable' do
subject
expect(response).to have_gitlab_http_status(:found)
expect(response).to redirect_to(new_user_session_path(anchor: 'register-pane'))
end
end
+
+ context 'with sign up flow and terms_opt_in experiment being enabled' do
+ before do
+ stub_experiment(signup_flow: true, terms_opt_in: true)
+ end
+
+ context 'when user is not part of the experiment' do
+ before do
+ stub_experiment_for_user(signup_flow: true, terms_opt_in: false)
+ end
+
+ it 'tracks event with right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::TermsOptIn',
+ 'start',
+ label: anything,
+ property: 'control_group'
+ )
+
+ subject
+ end
+ end
+
+ context 'when user is part of the experiment' do
+ before do
+ stub_experiment_for_user(signup_flow: true, terms_opt_in: true)
+ end
+
+ it 'tracks event with right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::TermsOptIn',
+ 'start',
+ label: anything,
+ property: 'experimental_group'
+ )
+
+ subject
+ end
+ end
+ end
end
describe '#create' do
@@ -250,35 +275,79 @@ RSpec.describe RegistrationsController do
expect(subject.current_user).to be_present
expect(subject.current_user.terms_accepted?).to be(true)
end
- end
- describe 'tracking data' do
- context 'with the experimental signup flow enabled and the user is part of the control group' do
+ context 'when experiment terms_opt_in is enabled' do
before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_user(signup_flow: false)
+ stub_experiment(terms_opt_in: true)
end
- it 'tracks the event with the right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::SignUpFlow',
- 'end',
- label: anything,
- property: 'control_group'
- )
- post :create, params: user_params
+ context 'when user is part of the experiment' do
+ before do
+ stub_experiment_for_user(terms_opt_in: true)
+ end
+
+ it 'creates the user with accepted terms' do
+ post :create, params: user_params
+
+ expect(subject.current_user).to be_present
+ expect(subject.current_user.terms_accepted?).to be(true)
+ end
+ end
+
+ context 'when user is not part of the experiment' do
+ before do
+ stub_experiment_for_user(terms_opt_in: false)
+ end
+
+ it 'creates the user without accepted terms' do
+ post :create, params: user_params
+
+ expect(flash[:alert]).to eq(_('You must accept our Terms of Service and privacy policy in order to register an account'))
+ end
end
end
+ end
+
+ describe 'tracking data' do
+ context 'with sign up flow and terms_opt_in experiment being enabled' do
+ subject { post :create, params: user_params }
- context 'with the experimental signup flow enabled and the user is part of the experimental group' do
before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_user(signup_flow: true)
+ stub_experiment(signup_flow: true, terms_opt_in: true)
end
- it 'does not track the event' do
- expect(Gitlab::Tracking).not_to receive(:event)
- post :create, params: user_params
+ context 'when user is not part of the experiment' do
+ before do
+ stub_experiment_for_user(signup_flow: true, terms_opt_in: false)
+ end
+
+ it 'tracks event with right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::TermsOptIn',
+ 'end',
+ label: anything,
+ property: 'control_group'
+ )
+
+ subject
+ end
+ end
+
+ context 'when user is part of the experiment' do
+ before do
+ stub_experiment_for_user(signup_flow: true, terms_opt_in: true)
+ end
+
+ it 'tracks event with right parameters' do
+ expect(Gitlab::Tracking).to receive(:event).with(
+ 'Growth::Acquisition::Experiment::TermsOptIn',
+ 'end',
+ label: anything,
+ property: 'experimental_group'
+ )
+
+ subject
+ end
end
end
end
@@ -386,24 +455,6 @@ RSpec.describe RegistrationsController do
end
end
- describe '#update_registration' do
- before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_user(signup_flow: true)
- sign_in(create(:user))
- end
-
- it 'tracks the event with the right parameters' do
- expect(Gitlab::Tracking).to receive(:event).with(
- 'Growth::Acquisition::Experiment::SignUpFlow',
- 'end',
- label: anything,
- property: 'experimental_group'
- )
- patch :update_registration, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
- end
- end
-
describe '#welcome' do
subject { get :welcome }
diff --git a/spec/controllers/repositories/git_http_controller_spec.rb b/spec/controllers/repositories/git_http_controller_spec.rb
index c938df8cf4e..851c1b7e519 100644
--- a/spec/controllers/repositories/git_http_controller_spec.rb
+++ b/spec/controllers/repositories/git_http_controller_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Repositories::GitHttpController do
repository_id: repository_id
}
end
+
let(:params) { container_params }
describe 'HEAD #info_refs' do
diff --git a/spec/controllers/repositories/lfs_storage_controller_spec.rb b/spec/controllers/repositories/lfs_storage_controller_spec.rb
new file mode 100644
index 00000000000..0201e73728f
--- /dev/null
+++ b/spec/controllers/repositories/lfs_storage_controller_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::LfsStorageController do
+ using RSpec::Parameterized::TableSyntax
+ include GitHttpHelpers
+
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user, scopes: ['write_repository']) }
+
+ let(:lfs_enabled) { true }
+
+ before do
+ stub_config(lfs: { enabled: lfs_enabled })
+ end
+
+ describe 'PUT #upload_finalize' do
+ let(:headers) { workhorse_internal_api_request_header }
+ let(:extra_headers) { {} }
+ let(:uploaded_file) { temp_file }
+
+ let(:params) do
+ {
+ namespace_id: project.namespace.path,
+ repository_id: "#{project.path}.git",
+ oid: '6b9765d3888aaec789e8c309eb05b05c3a87895d6ad70d2264bd7270fff665ac',
+ size: '6725030'
+ }
+ end
+
+ before do
+ request.headers.merge!(extra_headers)
+ request.headers.merge!(headers)
+
+ if uploaded_file
+ allow_next_instance_of(ActionController::Parameters) do |params|
+ allow(params).to receive(:[]).and_call_original
+ allow(params).to receive(:[]).with(:file).and_return(uploaded_file)
+ end
+ end
+ end
+
+ after do
+ FileUtils.rm_r(temp_file) if temp_file
+ end
+
+ subject do
+ put :upload_finalize, params: params
+ end
+
+ context 'with lfs enabled' do
+ context 'with unauthorized roles' do
+ where(:user_role, :expected_status) do
+ :guest | :forbidden
+ :anonymous | :unauthorized
+ end
+
+ with_them do
+ let(:extra_headers) do
+ if user_role == :anonymous
+ {}
+ else
+ { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) }
+ end
+ end
+
+ before do
+ project.send("add_#{user_role}", user) unless user_role == :anonymous
+ end
+
+ it_behaves_like 'returning response status', params[:expected_status]
+ end
+ end
+
+ context 'with at least developer role' do
+ let(:extra_headers) { { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) } }
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'creates the objects' do
+ expect { subject }
+ .to change { LfsObject.count }.by(1)
+ .and change { LfsObjectsProject.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'without the workhorse header' do
+ let(:headers) { {} }
+
+ it { expect { subject }.to raise_error(JWT::DecodeError) }
+ end
+
+ context 'without file' do
+ let(:uploaded_file) { nil }
+
+ it_behaves_like 'returning response status', :unprocessable_entity
+ end
+
+ context 'with an invalid file' do
+ let(:uploaded_file) { 'test' }
+
+ it_behaves_like 'returning response status', :unprocessable_entity
+ end
+
+ context 'when an expected error' do
+ [
+ ActiveRecord::RecordInvalid,
+ UploadedFile::InvalidPathError,
+ ObjectStorage::RemoteStoreError
+ ].each do |exception_class|
+ context "#{exception_class} raised" do
+ it 'renders lfs forbidden' do
+ expect(LfsObjectsProject).to receive(:safe_find_or_create_by!).and_raise(exception_class)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['documentation_url']).to be_present
+ expect(json_response['message']).to eq('Access forbidden. Check your access level.')
+ end
+ end
+ end
+ end
+
+ context 'when file is not stored' do
+ it 'renders unprocessable entity' do
+ expect(controller).to receive(:store_file!).and_return(nil)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(response.body).to eq('Unprocessable entity')
+ end
+ end
+ end
+ end
+
+ context 'with lfs disabled' do
+ let(:lfs_enabled) { false }
+ let(:extra_headers) { { 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(user.username, pat.token) } }
+
+ it_behaves_like 'returning response status', :not_implemented
+ end
+
+ def temp_file
+ upload_path = LfsObjectUploader.workhorse_local_upload_path
+ file_path = "#{upload_path}/lfs"
+
+ FileUtils.mkdir_p(upload_path)
+ File.write(file_path, 'test')
+
+ UploadedFile.new(file_path, filename: File.basename(file_path))
+ end
+ end
+end
diff --git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb
index 9eefbcb0835..1db99a09404 100644
--- a/spec/controllers/root_controller_spec.rb
+++ b/spec/controllers/root_controller_spec.rb
@@ -122,6 +122,30 @@ RSpec.describe RootController do
expect(response).to render_template 'dashboard/projects/index'
end
+
+ context 'when experiment is enabled' do
+ before do
+ stub_experiment_for_user(customize_homepage: true)
+ end
+
+ it 'renders the default dashboard' do
+ get :index
+
+ expect(assigns[:customize_homepage]).to be true
+ end
+ end
+
+ context 'when experiment is not enabled' do
+ before do
+ stub_experiment(customize_homepage: false)
+ end
+
+ it 'renders the default dashboard' do
+ get :index
+
+ expect(assigns[:customize_homepage]).to be false
+ end
+ end
end
end
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index 0849fb00e73..a41ff28841d 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -216,4 +216,23 @@ RSpec.describe SearchController do
it_behaves_like 'when the user cannot read cross project', :autocomplete, { term: 'hello' }
it_behaves_like 'with external authorization service enabled', :autocomplete, { term: 'hello' }
end
+
+ describe '#append_info_to_payload' do
+ it 'appends search metadata for logging' do
+ last_payload = nil
+ original_append_info_to_payload = controller.method(:append_info_to_payload)
+
+ expect(controller).to receive(:append_info_to_payload) do |payload|
+ original_append_info_to_payload.call(payload)
+ last_payload = payload
+ end
+
+ get :show, params: { scope: 'issues', search: 'hello world', group_id: '123', project_id: '456' }
+
+ expect(last_payload[:metadata]['meta.search.group_id']).to eq('123')
+ expect(last_payload[:metadata]['meta.search.project_id']).to eq('456')
+ expect(last_payload[:metadata]['meta.search.search']).to eq('hello world')
+ expect(last_payload[:metadata]['meta.search.scope']).to eq('issues')
+ end
+ end
end
diff --git a/spec/controllers/sent_notifications_controller_spec.rb b/spec/controllers/sent_notifications_controller_spec.rb
index 0c4a77d5926..02aaa5b16f1 100644
--- a/spec/controllers/sent_notifications_controller_spec.rb
+++ b/spec/controllers/sent_notifications_controller_spec.rb
@@ -216,6 +216,7 @@ RSpec.describe SentNotificationsController do
merge_request.subscriptions.create(user: user, project: project, subscribed: true)
end
end
+
let(:sent_notification) { create(:sent_notification, project: project, noteable: merge_request, recipient: user) }
before do
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index 16a58112479..257dcce0899 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -59,39 +59,6 @@ RSpec.describe SessionsController do
end
end
end
-
- describe 'tracking data' do
- context 'when the user is part of the experimental group' do
- before do
- stub_experiment_for_user(signup_flow: true)
- end
-
- it 'doesn\'t pass tracking parameters to the frontend' do
- get(:new)
- expect(Gon.tracking_data).to be_nil
- end
- end
-
- context 'with the experimental signup flow enabled and the user is part of the control group' do
- before do
- stub_experiment(signup_flow: true)
- stub_experiment_for_user(signup_flow: false)
- allow_any_instance_of(described_class).to receive(:experimentation_subject_id).and_return('uuid')
- end
-
- it 'passes the right tracking parameters to the frontend' do
- get(:new)
- expect(Gon.tracking_data).to eq(
- {
- category: 'Growth::Acquisition::Experiment::SignUpFlow',
- action: 'start',
- label: 'uuid',
- property: 'control_group'
- }
- )
- end
- end
- end
end
describe '#create' do
@@ -216,7 +183,7 @@ RSpec.describe SessionsController do
before do
stub_application_setting(recaptcha_enabled: true)
- request.headers[described_class::CAPTCHA_HEADER] = 1
+ request.headers[described_class::CAPTCHA_HEADER] = '1'
end
it 'displays an error when the reCAPTCHA is not solved' do
@@ -399,7 +366,7 @@ RSpec.describe SessionsController do
end
it 'warns about invalid login' do
- expect(response).to set_flash.now[:alert].to /Your account is locked./
+ expect(flash[:alert]).to eq('Your account is locked.')
end
it 'locks the user' do
@@ -409,7 +376,7 @@ RSpec.describe SessionsController do
it 'keeps the user locked on future login attempts' do
post(:create, params: { user: { login: user.username, password: user.password } })
- expect(response).to set_flash.now[:alert].to /Your account is locked./
+ expect(flash[:alert]).to eq('Your account is locked.')
end
end
end
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index f31da943957..1c9167ef025 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe 'Database schema' do
approver_groups: %w[target_id],
approvers: %w[target_id user_id],
audit_events: %w[author_id entity_id],
+ audit_events_part_5fc467ac26: %w[author_id entity_id],
award_emoji: %w[awardable_id user_id],
aws_roles: %w[role_external_id],
boards: %w[milestone_id],
@@ -164,6 +165,9 @@ RSpec.describe 'Database schema' do
context 'for enums' do
ApplicationRecord.descendants.each do |model|
+ # Skip the model if it is an abstract class, as it would not have an associated DB table
+ next if model.abstract_class?
+
describe model do
let(:ignored_enums) { ignored_limit_enums(model.name) }
let(:enums) { model.defined_enums.keys - ignored_enums }
@@ -185,6 +189,7 @@ RSpec.describe 'Database schema' do
"Operations::FeatureFlagScope" => %w[strategies],
"Operations::FeatureFlags::Strategy" => %w[parameters],
"Packages::Composer::Metadatum" => %w[composer_json],
+ "RawUsageData" => %w[payload], # Usage data payload changes often, we cannot use one schema
"Releases::Evidence" => %w[summary]
}.freeze
diff --git a/spec/factories/alert_management/alerts.rb b/spec/factories/alert_management/alerts.rb
index 881f633829a..d931947fff1 100644
--- a/spec/factories/alert_management/alerts.rb
+++ b/spec/factories/alert_management/alerts.rb
@@ -23,7 +23,7 @@ FactoryBot.define do
trait :with_assignee do |alert|
after(:create) do |alert|
- alert.alert_assignees.create(assignee: create(:user))
+ alert.alert_assignees.create!(assignee: create(:user))
end
end
diff --git a/spec/factories/audit_events.rb b/spec/factories/audit_events.rb
new file mode 100644
index 00000000000..38414400282
--- /dev/null
+++ b/spec/factories/audit_events.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :audit_event, class: 'SecurityEvent', aliases: [:user_audit_event] do
+ user
+
+ transient { target_user { create(:user) } }
+
+ entity_type { 'User' }
+ entity_id { target_user.id }
+ entity_path { target_user.full_path }
+ target_details { target_user.name }
+ ip_address { IPAddr.new '127.0.0.1' }
+ author_name { 'Jane Doe' }
+ details do
+ {
+ change: 'email address',
+ from: 'admin@gitlab.com',
+ to: 'maintainer@gitlab.com',
+ author_name: user.name,
+ target_id: target_user.id,
+ target_type: 'User',
+ target_details: target_user.name,
+ ip_address: '127.0.0.1',
+ entity_path: target_user.full_path
+ }
+ end
+
+ trait :project_event do
+ transient { target_project { create(:project) } }
+
+ entity_type { 'Project' }
+ entity_id { target_project.id }
+ entity_path { target_project.full_path }
+ target_details { target_project.name }
+ ip_address { IPAddr.new '127.0.0.1' }
+ details do
+ {
+ change: 'packages_enabled',
+ from: true,
+ to: false,
+ author_name: user.name,
+ target_id: target_project.id,
+ target_type: 'Project',
+ target_details: target_project.name,
+ ip_address: '127.0.0.1',
+ entity_path: target_project.full_path
+ }
+ end
+ end
+
+ trait :group_event do
+ transient { target_group { create(:group) } }
+
+ entity_type { 'Group' }
+ entity_id { target_group.id }
+ entity_path { target_group.full_path }
+ target_details { target_group.name }
+ ip_address { IPAddr.new '127.0.0.1' }
+ details do
+ {
+ change: 'project_creation_level',
+ from: nil,
+ to: 'Developers + Maintainers',
+ author_name: user.name,
+ target_id: target_group.id,
+ target_type: 'Group',
+ target_details: target_group.name,
+ ip_address: '127.0.0.1',
+ entity_path: target_group.full_path
+ }
+ end
+ end
+
+ factory :project_audit_event, traits: [:project_event]
+ factory :group_audit_event, traits: [:group_event]
+ end
+end
diff --git a/spec/factories/boards.rb b/spec/factories/boards.rb
index a201ca94380..cef7ec37f07 100644
--- a/spec/factories/boards.rb
+++ b/spec/factories/boards.rb
@@ -28,7 +28,7 @@ FactoryBot.define do
end
after(:create) do |board|
- board.lists.create(list_type: :closed)
+ board.lists.create!(list_type: :closed)
end
end
end
diff --git a/spec/factories/ci/pipeline_artifacts.rb b/spec/factories/ci/pipeline_artifacts.rb
new file mode 100644
index 00000000000..ecfd1e79e78
--- /dev/null
+++ b/spec/factories/ci/pipeline_artifacts.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_pipeline_artifact, class: 'Ci::PipelineArtifact' do
+ pipeline factory: :ci_pipeline
+ project { pipeline.project }
+ file_type { :code_coverage }
+ file_format { :raw }
+ file_store { Ci::PipelineArtifact::FILE_STORE_SUPPORTED.first }
+ size { 1.megabytes }
+
+ after(:build) do |artifact, _evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json')
+ end
+ end
+end
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index 5bd5ab7d67a..2790be8b70d 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -17,7 +17,7 @@ FactoryBot.define do
after(:create) do |pipeline, evaluator|
merge_request = evaluator.head_pipeline_of
- merge_request&.update(head_pipeline: pipeline)
+ merge_request&.update!(head_pipeline: pipeline)
end
factory :ci_pipeline do
diff --git a/spec/factories/clusters/agent_tokens.rb b/spec/factories/clusters/agent_tokens.rb
new file mode 100644
index 00000000000..6f92f2217b3
--- /dev/null
+++ b/spec/factories/clusters/agent_tokens.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :cluster_agent_token, class: 'Clusters::AgentToken' do
+ association :agent, factory: :cluster_agent
+
+ token_encrypted { Gitlab::CryptoHelper.aes256_gcm_encrypt(SecureRandom.hex(50)) }
+ end
+end
diff --git a/spec/factories/clusters/agents.rb b/spec/factories/clusters/agents.rb
new file mode 100644
index 00000000000..334671f69f0
--- /dev/null
+++ b/spec/factories/clusters/agents.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :cluster_agent, class: 'Clusters::Agent' do
+ project
+
+ sequence(:name) { |n| "agent-#{n}" }
+ end
+end
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index 6d1229063d8..66c33c9ece0 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -22,8 +22,12 @@ FactoryBot.define do
imported { true }
end
+ trait :with_relative_position do
+ sequence(:relative_position) { |n| n * 1000 }
+ end
+
create_versions = ->(design, evaluator, commit_version) do
- unless evaluator.versions_count.zero?
+ unless evaluator.versions_count == 0
project = design.project
issue = design.issue
repository = project.design_repository
@@ -34,7 +38,7 @@ FactoryBot.define do
run_action = ->(action) do
sha = commit_version[action]
version = DesignManagement::Version.new(sha: sha, issue: issue, author: evaluator.author)
- version.save(validate: false) # We need it to have an ID, validate later
+ version.save!(validate: false) # We need it to have an ID, validate later
Gitlab::Database.bulk_insert(dv_table_name, [action.row_attrs(version)]) # rubocop:disable Gitlab/BulkInsert
end
diff --git a/spec/factories/design_management/versions.rb b/spec/factories/design_management/versions.rb
index e6d17ba691c..a5c0e7076e9 100644
--- a/spec/factories/design_management/versions.rb
+++ b/spec/factories/design_management/versions.rb
@@ -40,7 +40,7 @@ FactoryBot.define do
)
version.designs += specific_designs
- unless evaluator.designs_count.zero? || version.designs.present?
+ unless evaluator.designs_count == 0 || version.designs.present?
version.designs << create(:design, issue: version.issue)
end
end
@@ -135,7 +135,7 @@ FactoryBot.define do
actions: version_actions
)
- version.update(sha: sha)
+ version.update!(sha: sha)
end
end
end
diff --git a/spec/factories/emails.rb b/spec/factories/emails.rb
index 284ba631c37..b30fa8a5896 100644
--- a/spec/factories/emails.rb
+++ b/spec/factories/emails.rb
@@ -6,6 +6,6 @@ FactoryBot.define do
email { generate(:email_alias) }
trait(:confirmed) { confirmed_at { Time.now } }
- trait(:skip_validate) { to_create {|instance| instance.save(validate: false) } }
+ trait(:skip_validate) { to_create {|instance| instance.save!(validate: false) } }
end
end
diff --git a/spec/factories/experiments.rb b/spec/factories/experiments.rb
new file mode 100644
index 00000000000..2c51a6585f4
--- /dev/null
+++ b/spec/factories/experiments.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :experiment do
+ name { generate(:title) }
+ end
+end
diff --git a/spec/factories/group_deploy_keys_groups.rb b/spec/factories/group_deploy_keys_groups.rb
new file mode 100644
index 00000000000..14a4ca77392
--- /dev/null
+++ b/spec/factories/group_deploy_keys_groups.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :group_deploy_keys_group do
+ group_deploy_key
+ group
+ can_push { true }
+ end
+end
diff --git a/spec/factories/issues.rb b/spec/factories/issues.rb
index 4d0924a9412..99fe2ef9c0a 100644
--- a/spec/factories/issues.rb
+++ b/spec/factories/issues.rb
@@ -7,6 +7,7 @@ FactoryBot.define do
author { project.creator }
updated_by { author }
relative_position { RelativePositioning::START_POSITION }
+ issue_type { :issue }
trait :confidential do
confidential { true }
@@ -38,8 +39,12 @@ FactoryBot.define do
end
after(:create) do |issue, evaluator|
- issue.update(labels: evaluator.labels)
+ issue.update!(labels: evaluator.labels)
end
end
+
+ factory :incident do
+ issue_type { :incident }
+ end
end
end
diff --git a/spec/factories/iterations.rb b/spec/factories/iterations.rb
index f6be1d9d752..bd61cd469af 100644
--- a/spec/factories/iterations.rb
+++ b/spec/factories/iterations.rb
@@ -36,6 +36,12 @@ FactoryBot.define do
end
end
+ trait(:skip_project_validation) do
+ after(:stub, :build) do |iteration|
+ iteration.skip_project_validation = true
+ end
+ end
+
after(:build, :stub) do |iteration, evaluator|
if evaluator.group
iteration.group = evaluator.group
@@ -49,7 +55,7 @@ FactoryBot.define do
id = evaluator.resource_parent.id
evaluator.resource_parent.is_a?(Group) ? evaluator.group_id = id : evaluator.project_id = id
else
- iteration.project = create(:project)
+ iteration.group = create(:group)
end
end
diff --git a/spec/factories/labels.rb b/spec/factories/labels.rb
index 2e783adcc94..6725b571f19 100644
--- a/spec/factories/labels.rb
+++ b/spec/factories/labels.rb
@@ -27,7 +27,7 @@ FactoryBot.define do
after(:create) do |label, evaluator|
if evaluator.priority
- label.priorities.create(project: label.project, priority: evaluator.priority)
+ label.priorities.create!(project: label.project, priority: evaluator.priority)
end
end
end
diff --git a/spec/factories/merge_requests.rb b/spec/factories/merge_requests.rb
index 2a06690f894..af6e88f73b1 100644
--- a/spec/factories/merge_requests.rb
+++ b/spec/factories/merge_requests.rb
@@ -43,6 +43,21 @@ FactoryBot.define do
state_id { MergeRequest.available_states[:merged] }
end
+ trait :with_merged_metrics do
+ merged
+
+ transient do
+ merged_by { author }
+ end
+
+ after(:build) do |merge_request, evaluator|
+ metrics = merge_request.build_metrics
+ metrics.merged_at = 1.week.ago
+ metrics.merged_by = evaluator.merged_by
+ metrics.pipeline = create(:ci_empty_pipeline)
+ end
+ end
+
trait :merged_target do
source_branch { "merged-target" }
target_branch { "improve/awesome" }
@@ -268,7 +283,7 @@ FactoryBot.define do
end
after(:create) do |merge_request, evaluator|
- merge_request.update(labels: evaluator.labels)
+ merge_request.update!(labels: evaluator.labels)
end
end
end
diff --git a/spec/factories/namespace_settings.rb b/spec/factories/namespace_settings.rb
new file mode 100644
index 00000000000..388ea7dab91
--- /dev/null
+++ b/spec/factories/namespace_settings.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :namespace_settings, class: 'NamespaceSetting' do
+ namespace
+ end
+end
diff --git a/spec/factories/namespaces.rb b/spec/factories/namespaces.rb
index f4d5848e878..0dcec086da9 100644
--- a/spec/factories/namespaces.rb
+++ b/spec/factories/namespaces.rb
@@ -30,6 +30,10 @@ FactoryBot.define do
association :root_storage_statistics, factory: :namespace_root_storage_statistics
end
+ trait :with_namespace_settings do
+ association :namespace_settings, factory: :namespace_settings
+ end
+
# Construct a hierarchy underneath the namespace.
# Each namespace will have `children` amount of children,
# and `depth` levels of descendants.
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 562269a67bc..a7902f6f105 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -10,7 +10,7 @@ FactoryBot.define do
maven_metadatum
after :build do |package|
- package.maven_metadatum.path = "#{package.name}/#{package.version}"
+ package.maven_metadatum.path = package.version? ? "#{package.name}/#{package.version}" : package.name
end
after :create do |package|
diff --git a/spec/factories/plans.rb b/spec/factories/plans.rb
index 81506edcf16..903c176ec2a 100644
--- a/spec/factories/plans.rb
+++ b/spec/factories/plans.rb
@@ -6,7 +6,7 @@ FactoryBot.define do
factory :"#{plan}_plan" do
name { plan }
title { name.titleize }
- initialize_with { Plan.find_or_create_by(name: plan) }
+ initialize_with { Plan.find_or_create_by!(name: plan) }
end
end
end
diff --git a/spec/factories/project_hooks.rb b/spec/factories/project_hooks.rb
index 6592141e26d..15b240acba4 100644
--- a/spec/factories/project_hooks.rb
+++ b/spec/factories/project_hooks.rb
@@ -21,6 +21,7 @@ FactoryBot.define do
job_events { true }
pipeline_events { true }
wiki_page_events { true }
+ deployment_events { true }
end
end
end
diff --git a/spec/factories/project_repository_storage_moves.rb b/spec/factories/project_repository_storage_moves.rb
index ea0b34e0338..69fb3af45e6 100644
--- a/spec/factories/project_repository_storage_moves.rb
+++ b/spec/factories/project_repository_storage_moves.rb
@@ -15,6 +15,10 @@ FactoryBot.define do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:started].value }
end
+ trait :replicated do
+ state { ProjectRepositoryStorageMove.state_machines[:state].states[:replicated].value }
+ end
+
trait :finished do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:finished].value }
end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index e4b53186ea8..328b7f9a229 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -61,7 +61,7 @@ FactoryBot.define do
hash.store("pages_access_level", evaluator.pages_access_level)
end
- project.project_feature.update(hash)
+ project.project_feature.update!(hash)
# Normally the class Projects::CreateService is used for creating
# projects, and this class takes care of making sure the owner and current
@@ -82,7 +82,7 @@ FactoryBot.define do
import_state.jid = evaluator.import_jid
import_state.correlation_id_value = evaluator.import_correlation_id
import_state.last_error = evaluator.import_last_error
- import_state.save
+ import_state.save!
end
end
@@ -389,4 +389,8 @@ FactoryBot.define do
create(:design, project: project, issue: issue)
end
end
+
+ trait :in_subgroup do
+ namespace factory: [:group, :nested]
+ end
end
diff --git a/spec/factories/prometheus_alert.rb b/spec/factories/prometheus_alert.rb
index a9fede9efca..18cf1a20e0d 100644
--- a/spec/factories/prometheus_alert.rb
+++ b/spec/factories/prometheus_alert.rb
@@ -13,5 +13,9 @@ FactoryBot.define do
prometheus_metric do |alert|
build(:prometheus_metric, project: alert.project)
end
+
+ trait :with_runbook_url do
+ runbook_url { 'https://runbooks.gitlab.com/metric_gt_1' }
+ end
end
end
diff --git a/spec/factories/protected_branches/merge_access_levels.rb b/spec/factories/protected_branches/merge_access_levels.rb
new file mode 100644
index 00000000000..779a8617de1
--- /dev/null
+++ b/spec/factories/protected_branches/merge_access_levels.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :protected_branch_merge_access_level, class: 'ProtectedBranch::MergeAccessLevel' do
+ protected_branch
+ access_level { Gitlab::Access::DEVELOPER }
+ end
+end
diff --git a/spec/factories/protected_branches/push_access_levels.rb b/spec/factories/protected_branches/push_access_levels.rb
new file mode 100644
index 00000000000..fa3a35fe282
--- /dev/null
+++ b/spec/factories/protected_branches/push_access_levels.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :protected_branch_push_access_level, class: 'ProtectedBranch::PushAccessLevel' do
+ protected_branch
+ access_level { Gitlab::Access::DEVELOPER }
+ end
+end
diff --git a/spec/factories/raw_usage_data.rb b/spec/factories/raw_usage_data.rb
new file mode 100644
index 00000000000..e0ef357fdbd
--- /dev/null
+++ b/spec/factories/raw_usage_data.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :raw_usage_data do
+ recorded_at { Time.current }
+ payload { { test: 'test' } }
+ end
+end
diff --git a/spec/factories/resource_iteration_event.rb b/spec/factories/resource_iteration_event.rb
new file mode 100644
index 00000000000..85e7320f7a7
--- /dev/null
+++ b/spec/factories/resource_iteration_event.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :resource_iteration_event do
+ issue { merge_request.nil? ? create(:issue) : nil }
+ merge_request { nil }
+ iteration
+ action { :add }
+ user { issue&.author || merge_request&.author || create(:user) }
+ end
+end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 9a521336fee..7fbf6f16dc7 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -116,12 +116,6 @@ FactoryBot.define do
issue_tracker
end
- factory :gitlab_issue_tracker_service do
- project
- active { true }
- issue_tracker
- end
-
trait :issue_tracker do
transient do
create_data { true }
@@ -196,7 +190,7 @@ FactoryBot.define do
IssueTrackerService.skip_callback(:validation, :before, :handle_properties)
end
- to_create { |instance| instance.save(validate: false) }
+ to_create { |instance| instance.save!(validate: false) }
after(:create) do
IssueTrackerService.set_callback(:validation, :before, :handle_properties)
diff --git a/spec/factories/usage_data.rb b/spec/factories/usage_data.rb
index 518b5b5e76a..d2b8fd94aca 100644
--- a/spec/factories/usage_data.rb
+++ b/spec/factories/usage_data.rb
@@ -24,13 +24,15 @@ FactoryBot.define do
create(:service, project: projects[2], type: 'SlackService', active: true)
create(:service, project: projects[2], type: 'MattermostService', active: false)
create(:service, :template, type: 'MattermostService', active: true)
+ mattermost_instance = create(:service, :instance, type: 'MattermostService', active: true)
+ create(:service, project: projects[1], type: 'MattermostService', active: true, inherit_from_id: mattermost_instance.id)
create(:service, project: projects[2], type: 'CustomIssueTrackerService', active: true)
create(:project_error_tracking_setting, project: projects[0])
create(:project_error_tracking_setting, project: projects[1], enabled: false)
create(:alerts_service, project: projects[0])
create(:alerts_service, :inactive, project: projects[1])
- alert_bot_issues = create_list(:issue, 2, project: projects[0], author: User.alert_bot)
- create_list(:issue, 2, project: projects[1], author: User.alert_bot)
+ alert_bot_issues = create_list(:incident, 2, project: projects[0], author: User.alert_bot)
+ create_list(:incident, 2, project: projects[1], author: User.alert_bot)
issues = create_list(:issue, 4, project: projects[0])
create_list(:prometheus_alert, 2, project: projects[0])
create(:prometheus_alert, project: projects[1])
@@ -45,6 +47,8 @@ FactoryBot.define do
create(:zoom_meeting, project: projects[0], issue: projects[0].issues[2], issue_status: :added)
create_list(:zoom_meeting, 2, project: projects[0], issue: projects[0].issues[2], issue_status: :removed)
create(:sentry_issue, issue: projects[0].issues[0])
+ create(:protected_branch, project: projects[0])
+ create(:protected_branch, name: 'main', project: projects[0])
# Incident Labeled Issues
incident_label_attrs = IncidentManagement::CreateIncidentLabelService::LABEL_PROPERTIES
diff --git a/spec/factories/wiki_pages.rb b/spec/factories/wiki_pages.rb
index e7fcc19bbfe..cc866d336a4 100644
--- a/spec/factories/wiki_pages.rb
+++ b/spec/factories/wiki_pages.rb
@@ -31,7 +31,8 @@ FactoryBot.define do
end
to_create do |page, evaluator|
- page.create(message: evaluator.message)
+ # WikiPage is an ActiveModel, which doesn't support `create!`.
+ page.create(message: evaluator.message) # rubocop:disable Rails/SaveBang
end
end
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index 4b26ceb55e2..12046518aac 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -196,6 +196,7 @@ RSpec.describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_moc
'base' => 'dc=example,dc=com'
}
end
+
let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: uid, provider: provider) }
before do
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index cbaa18509ba..522da760062 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe "Admin::Projects" do
expect(page).to have_content('Developer')
end
- find(:css, '.content-list li', text: current_user.name).find(:css, 'a.btn-remove').click
+ find(:css, '.content-list li', text: current_user.name).find(:css, 'a.btn-danger').click
expect(page).not_to have_selector(:css, '.content-list')
end
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 55f6a9930ff..f5b05c76e90 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -209,8 +209,7 @@ RSpec.describe 'Admin updates settings', :clean_gitlab_redis_shared_state, :do_n
context 'Integrations page' do
before do
- stub_feature_flags(instance_level_integrations: false)
- visit integrations_admin_application_settings_path
+ visit general_admin_application_settings_path
end
it 'Enable hiding third party offers' do
diff --git a/spec/features/atom/users_spec.rb b/spec/features/atom/users_spec.rb
index c79b812df46..ab874408e55 100644
--- a/spec/features/atom/users_spec.rb
+++ b/spec/features/atom/users_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe "User Feed" do
author: user,
description: "Houston, we have a bug!\n\n***\n\nI guess.")
end
+
let(:note) do
create(:note,
noteable: issue,
@@ -37,6 +38,7 @@ RSpec.describe "User Feed" do
note: 'Bug confirmed :+1:',
project: project)
end
+
let(:merge_request) do
create(:merge_request,
title: 'Fix bug',
@@ -45,6 +47,7 @@ RSpec.describe "User Feed" do
target_project: project,
description: "Here is the fix: ![an image](image.png)")
end
+
let(:push_event) { create(:push_event, project: project, author: user) }
let!(:push_event_payload) { create(:push_event_payload, event: push_event) }
diff --git a/spec/features/boards/issue_ordering_spec.rb b/spec/features/boards/issue_ordering_spec.rb
index 03a76d9d3fd..87d29eed68d 100644
--- a/spec/features/boards/issue_ordering_spec.rb
+++ b/spec/features/boards/issue_ordering_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Issue Boards', :js do
end
context 'un-ordered issues' do
- let!(:issue4) { create(:labeled_issue, project: project, labels: [label]) }
+ let!(:issue4) { create(:labeled_issue, project: project, labels: [label], relative_position: nil) }
before do
visit project_board_path(project, board)
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index 5b78d93ae04..346f305f0d0 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Contributions Calendar', :js do
def get_cell_date_selector(contributions, date)
contribution_text =
- if contributions.zero?
+ if contributions == 0
'No contributions'
else
"#{contributions} #{'contribution'.pluralize(contributions)}"
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 6058c35c2cf..4f7f62d00a5 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe 'Clusterable > Show page' do
expect(page).to have_content(cluster_type_label)
end
- it 'allow the user to set domain' do
+ it 'allow the user to set domain', :js do
visit cluster_path
within '.js-cluster-integration-form' do
@@ -28,20 +28,19 @@ RSpec.describe 'Clusterable > Show page' do
click_on 'Save changes'
end
- expect(page.status_code).to eq(200)
expect(page).to have_content('Kubernetes cluster was successfully updated.')
end
- context 'when there is a cluster with ingress and external ip' do
+ context 'when there is a cluster with ingress and external ip', :js do
before do
cluster.create_application_ingress!(external_ip: '192.168.1.100')
visit cluster_path
end
- it 'shows help text with the domain as an alternative to custom domain' do
+ it 'shows help text with the domain as an alternative to custom domain', :js do
within '.js-cluster-integration-form' do
- expect(find(cluster_ingress_help_text_selector)).not_to match_css(hide_modifier_selector)
+ expect(find(cluster_ingress_help_text_selector).text).to include('192.168.1.100')
end
end
end
@@ -51,7 +50,7 @@ RSpec.describe 'Clusterable > Show page' do
visit cluster_path
within '.js-cluster-integration-form' do
- expect(find(cluster_ingress_help_text_selector)).to match_css(hide_modifier_selector)
+ expect(page).not_to have_selector(cluster_ingress_help_text_selector)
end
end
end
diff --git a/spec/features/clusters/installing_applications_shared_examples.rb b/spec/features/clusters/installing_applications_shared_examples.rb
index 74150c42519..c422aa2be72 100644
--- a/spec/features/clusters/installing_applications_shared_examples.rb
+++ b/spec/features/clusters/installing_applications_shared_examples.rb
@@ -1,12 +1,10 @@
# frozen_string_literal: true
-RSpec.shared_examples "installing applications for a cluster" do |managed_apps_local_tiller|
+RSpec.shared_examples "installing applications for a cluster" do
before do
# Reduce interval from 10 seconds which is too long for an automated test
stub_const("#{Clusters::ClustersController}::STATUS_POLLING_INTERVAL", 500)
- stub_feature_flags(managed_apps_local_tiller: managed_apps_local_tiller)
-
visit cluster_path
end
@@ -31,12 +29,7 @@ RSpec.shared_examples "installing applications for a cluster" do |managed_apps_l
it 'user can install applications' do
wait_for_requests
- application_row =
- if managed_apps_local_tiller
- '.js-cluster-application-row-ingress'
- else
- '.js-cluster-application-row-helm'
- end
+ application_row = '.js-cluster-application-row-ingress'
page.within(application_row) do
expect(page).not_to have_css('.js-cluster-application-install-button[disabled]')
@@ -44,50 +37,11 @@ RSpec.shared_examples "installing applications for a cluster" do |managed_apps_l
end
end
- if managed_apps_local_tiller
- it 'does not show the Helm application' do
- expect(page).not_to have_selector(:css, '.js-cluster-application-row-helm')
- end
- else
- context 'when user installs Helm' do
- before do
- allow(ClusterInstallAppWorker).to receive(:perform_async)
- wait_for_requests
-
- page.within('.js-cluster-application-row-helm') do
- page.find(:css, '.js-cluster-application-install-button').click
- end
-
- wait_for_requests
- end
-
- it 'shows the status transition' do
- page.within('.js-cluster-application-row-helm') do
- # FE sends request and gets the response, then the buttons is "Installing"
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_helm.make_installing!
-
- # FE starts polling and update the buttons to "Installing"
- expect(page).to have_css('.js-cluster-application-install-button[disabled]', exact_text: 'Installing')
-
- Clusters::Cluster.last.application_helm.make_installed!
-
- expect(page).not_to have_css('button', exact_text: 'Install', visible: :all)
- expect(page).not_to have_css('button', exact_text: 'Installing', visible: :all)
- expect(page).to have_css('.js-cluster-application-uninstall-button:not([disabled])', exact_text: 'Uninstall')
- end
-
- expect(page).to have_content('Helm Tiller was successfully installed on your Kubernetes cluster')
- end
- end
+ it 'does not show the Helm application' do
+ expect(page).not_to have_selector(:css, '.js-cluster-application-row-helm')
end
context 'when user installs Knative' do
- before do
- create(:clusters_applications_helm, :installed, cluster: cluster) unless managed_apps_local_tiller
- end
-
context 'on an abac cluster' do
let(:cluster) { create(:cluster, :provided_by_gcp, :rbac_disabled, *cluster_factory_args) }
@@ -166,8 +120,6 @@ RSpec.shared_examples "installing applications for a cluster" do |managed_apps_l
allow(ClusterInstallAppWorker).to receive(:perform_async)
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
-
- create(:clusters_applications_helm, :installed, cluster: cluster) unless managed_apps_local_tiller
end
it 'shows status transition' do
@@ -223,8 +175,6 @@ RSpec.shared_examples "installing applications for a cluster" do |managed_apps_l
before do
allow(ClusterInstallAppWorker).to receive(:perform_async)
- create(:clusters_applications_helm, :installed, cluster: cluster) unless managed_apps_local_tiller
-
page.within('.js-cluster-application-row-elastic_stack') do
click_button 'Install'
end
@@ -255,8 +205,6 @@ RSpec.shared_examples "installing applications for a cluster" do |managed_apps_l
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_in)
allow(ClusterWaitForIngressIpAddressWorker).to receive(:perform_async)
- create(:clusters_applications_helm, :installed, cluster: cluster) unless managed_apps_local_tiller
-
page.within('.js-cluster-application-row-ingress') do
expect(page).to have_css('.js-cluster-application-install-button:not([disabled])')
page.find(:css, '.js-cluster-application-install-button').click
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index 60c37d1e125..e66a40720da 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe 'Commits' do
describe 'Cancel build' do
it 'cancels build', :js, :sidekiq_might_not_need_inline do
visit pipeline_path(pipeline)
- find('.js-btn-cancel-pipeline').click
+ find('[data-testid="cancelPipeline"]').click
expect(page).to have_content 'canceled'
end
end
diff --git a/spec/features/explore/groups_spec.rb b/spec/features/explore/groups_spec.rb
index 6e9749f29c3..201dc24b359 100644
--- a/spec/features/explore/groups_spec.rb
+++ b/spec/features/explore/groups_spec.rb
@@ -26,10 +26,6 @@ RSpec.describe 'Explore Groups', :js do
end
end
- before do
- stub_feature_flags(vue_issuables_list: false)
- end
-
shared_examples 'renders public and internal projects' do
it do
visit_page
diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb
index c878ee7329f..0ca626381d4 100644
--- a/spec/features/global_search_spec.rb
+++ b/spec/features/global_search_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe 'Global search' do
end
end
- it 'closes the dropdown on blur', :js do
+ it 'closes the dropdown on blur', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/201841' do
fill_in 'search', with: "a"
dropdown = find('.js-dashboard-search-options')
diff --git a/spec/features/groups/empty_states_spec.rb b/spec/features/groups/empty_states_spec.rb
index aaa59108b95..4488f53a03f 100644
--- a/spec/features/groups/empty_states_spec.rb
+++ b/spec/features/groups/empty_states_spec.rb
@@ -7,8 +7,6 @@ RSpec.describe 'Group empty states' do
let(:user) { create(:group_member, :developer, user: create(:user), group: group ).user }
before do
- stub_feature_flags(vue_issuables_list: false)
-
sign_in(user)
end
@@ -34,42 +32,52 @@ RSpec.describe 'Group empty states' do
expect(page).not_to have_selector('.empty-state')
end
- it "displays link to create new #{issuable} when no open #{issuable} is found" do
+ it "displays link to create new #{issuable} when no open #{issuable} is found", :js do
create("closed_#{issuable}", project_relation => project)
issuable_link_fn = "project_#{issuable}s_path"
visit public_send(issuable_link_fn, project)
+ wait_for_all_requests
+
page.within(find('.empty-state')) do
expect(page).to have_content(/There are no open #{issuable.to_s.humanize.downcase}/)
- expect(page).to have_selector("#new_#{issuable}_body_link")
+ new_issuable_path = issuable == :issue ? 'new_project_issue_path' : 'project_new_merge_request_path'
+
+ path = public_send(new_issuable_path, project)
+
+ expect(page.find('a')['href']).to have_content(path)
end
end
- it 'displays link to create new issue when the current search gave no results' do
+ it 'displays link to create new issue when the current search gave no results', :js do
create(issuable, project_relation => project)
issuable_link_fn = "project_#{issuable}s_path"
visit public_send(issuable_link_fn, project, author_username: 'foo', scope: 'all', state: 'opened')
+ wait_for_all_requests
+
page.within(find('.empty-state')) do
expect(page).to have_content(/Sorry, your filter produced no results/)
new_issuable_path = issuable == :issue ? 'new_project_issue_path' : 'project_new_merge_request_path'
path = public_send(new_issuable_path, project)
- expect(page).to have_selector("#new_#{issuable}_body_link[href='#{path}']")
+ expect(page.find('a')['href']).to have_content(path)
end
end
- it "displays conditional text when no closed #{issuable} is found" do
+ it "displays conditional text when no closed #{issuable} is found", :js do
create(issuable, project_relation => project)
issuable_link_fn = "project_#{issuable}s_path"
visit public_send(issuable_link_fn, project, state: 'closed')
+ wait_for_all_requests
+
page.within(find('.empty-state')) do
expect(page).to have_content(/There are no closed #{issuable.to_s.humanize.downcase}/)
end
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index 8972be45acb..60cd1ebbbd7 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -158,7 +158,7 @@ RSpec.describe 'Edit group settings' do
page.within('.gs-advanced') do
fill_in 'group_path', with: new_group_path
- click_button 'Change group path'
+ click_button 'Change group URL'
end
end
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index c76e0c311a6..8ecd2beba68 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -11,11 +11,7 @@ RSpec.describe 'Group issues page' do
let(:project_with_issues_disabled) { create(:project, :issues_disabled, group: group) }
let(:path) { issues_group_path(group) }
- before do
- stub_feature_flags(vue_issuables_list: false)
- end
-
- context 'with shared examples' do
+ context 'with shared examples', :js do
let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}
include_examples 'project features apply to issuables', Issue
@@ -30,19 +26,33 @@ RSpec.describe 'Group issues page' do
user_in_group
end
- it_behaves_like "it has an RSS button with current_user's feed token"
it_behaves_like "an autodiscoverable RSS feed with current_user's feed token"
+
+ # Note: The shared example from rss_shared_example.rb uses the CSS pseudo-class `:has`,
+ # which is VERY experimental and only supported in Nokogiri, as used by Capybara.
+ # However, the `:js` option forces Capybara to use Selenium, which doesn't support `:has`.
+ context "it has an RSS button with current_user's feed token" do
+ it "shows the RSS button with current_user's feed token" do
+ expect(find('[data-testid="rss-feed-link"]')['href']).to have_content(user.feed_token)
+ end
+ end
end
context 'when signed out' do
let(:user) { nil }
- it_behaves_like "it has an RSS button without a feed token"
it_behaves_like "an autodiscoverable RSS feed without a feed token"
+
+ # Note: please see the above
+ context "it has an RSS button without a feed token" do
+ it "shows the RSS button without a feed token" do
+ expect(find('[data-testid="rss-feed-link"]')['href']).not_to have_content('feed_token')
+ end
+ end
end
end
- context 'assignee', :js do
+ context 'assignee' do
let(:access_level) { ProjectFeature::ENABLED }
let(:user) { user_in_group }
let(:user2) { user_outside_group }
@@ -56,7 +66,7 @@ RSpec.describe 'Group issues page' do
end
end
- context 'issues list' do
+ context 'issues list', :js do
let(:subgroup) { create(:group, parent: group) }
let(:subgroup_project) { create(:project, :public, group: subgroup)}
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
@@ -100,8 +110,6 @@ RSpec.describe 'Group issues page' do
find('.empty-state .js-lazy-loaded')
find('.new-project-item-link').click
- find('.select2-input').set(group.name)
-
page.within('.select2-results') do
expect(page).to have_content(project.full_name)
expect(page).not_to have_content(project_with_issues_disabled.full_name)
@@ -110,7 +118,7 @@ RSpec.describe 'Group issues page' do
end
end
- context 'manual ordering' do
+ context 'manual ordering', :js do
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
let!(:issue1) { create(:issue, project: project, title: 'Issue #1', relative_position: 1) }
@@ -143,9 +151,11 @@ RSpec.describe 'Group issues page' do
end
end
- it 'issues should be draggable and persist order', :js do
+ it 'issues should be draggable and persist order' do
visit issues_group_path(group, sort: 'relative_position')
+ wait_for_requests
+
drag_to(selector: '.manual-ordering',
from_index: 0,
to_index: 2)
@@ -159,11 +169,13 @@ RSpec.describe 'Group issues page' do
check_issue_order
end
- it 'issues should not be draggable when user is not logged in', :js do
+ it 'issues should not be draggable when user is not logged in' do
sign_out(user_in_group)
visit issues_group_path(group, sort: 'relative_position')
+ wait_for_requests
+
drag_to(selector: '.manual-ordering',
from_index: 0,
to_index: 2)
@@ -187,7 +199,7 @@ RSpec.describe 'Group issues page' do
end
end
- context 'issues pagination' do
+ context 'issues pagination', :js do
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
let!(:issues) do
@@ -204,7 +216,9 @@ RSpec.describe 'Group issues page' do
end
it 'first pagination item is active' do
- expect(page).to have_css(".js-first-button a.page-link.active")
+ page.within('.gl-pagination') do
+ expect(find('.active')).to have_content('1')
+ end
end
end
end
diff --git a/spec/features/groups/members/manage_groups_spec.rb b/spec/features/groups/members/manage_groups_spec.rb
index f1cf04417c0..faf455e4ed9 100644
--- a/spec/features/groups/members/manage_groups_spec.rb
+++ b/spec/features/groups/members/manage_groups_spec.rb
@@ -20,26 +20,28 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
add_group(shared_with_group.id, 'Reporter')
+ click_groups_tab
+
page.within(first_row) do
expect(page).to have_content(shared_with_group.name)
expect(page).to have_content('Reporter')
end
end
- it 'remove user from group' do
+ it 'remove group from group' do
create(:group_group_link, shared_group: shared_group,
shared_with_group: shared_with_group, group_access: ::Gitlab::Access::DEVELOPER)
visit group_group_members_path(shared_group)
+ click_groups_tab
+
expect(page).to have_content(shared_with_group.name)
accept_confirm do
- find(:css, '#existing_shares li', text: shared_with_group.name).find(:css, 'a.btn-remove').click
+ find(:css, '#tab-groups li', text: shared_with_group.name).find(:css, 'a.btn-remove').click
end
- wait_for_requests
-
expect(page).not_to have_content(shared_with_group.name)
end
@@ -49,6 +51,8 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
visit group_group_members_path(shared_group)
+ click_groups_tab
+
page.within(first_row) do
click_button('Developer')
click_link('Maintainer')
@@ -67,4 +71,8 @@ RSpec.describe 'Groups > Members > Manage groups', :js do
click_button "Invite"
end
end
+
+ def click_groups_tab
+ click_link "Groups"
+ end
end
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 99846ecee27..0267bea2f53 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe 'Groups > Members > Manage members' do
visit group_group_members_path(group)
# Open modal
- find(:css, '.project-members-page li', text: user2.name).find(:css, 'button.btn-remove').click
+ find(:css, '.project-members-page li', text: user2.name).find(:css, 'button.btn-danger').click
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
@@ -101,7 +101,7 @@ RSpec.describe 'Groups > Members > Manage members' do
add_user('test@example.com', 'Reporter')
- click_link('Pending')
+ click_link('Invited')
page.within('.content-list.members-list') do
expect(page).to have_content('test@example.com')
@@ -124,7 +124,7 @@ RSpec.describe 'Groups > Members > Manage members' do
expect(page).not_to have_button 'Developer'
# Can not remove user2
- expect(page).not_to have_css('a.btn-remove')
+ expect(page).not_to have_css('a.btn-danger')
end
end
diff --git a/spec/features/groups/members/master_manages_access_requests_spec.rb b/spec/features/groups/members/master_manages_access_requests_spec.rb
index 2a17e7d2a5c..71c9b280ebe 100644
--- a/spec/features/groups/members/master_manages_access_requests_spec.rb
+++ b/spec/features/groups/members/master_manages_access_requests_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Groups > Members > Maintainer manages access requests' do
it_behaves_like 'Maintainer manages access requests' do
+ let(:has_tabs) { true }
let(:entity) { create(:group, :public) }
let(:members_page_path) { group_group_members_path(entity) }
end
diff --git a/spec/features/groups/members/search_members_spec.rb b/spec/features/groups/members/search_members_spec.rb
index 4c34ccf87c3..ad4f5c0b579 100644
--- a/spec/features/groups/members/search_members_spec.rb
+++ b/spec/features/groups/members/search_members_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'Search group member' do
end
it 'renders member users' do
- page.within '.user-search-form' do
+ page.within '[data-testid="user-search-form"]' do
fill_in 'search', with: member.name
find('.user-search-btn').click
end
diff --git a/spec/features/groups/members/tabs_spec.rb b/spec/features/groups/members/tabs_spec.rb
new file mode 100644
index 00000000000..fa77d1a2ff8
--- /dev/null
+++ b/spec/features/groups/members/tabs_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Groups > Members > Tabs' do
+ using RSpec::Parameterized::TableSyntax
+
+ shared_examples 'active "Members" tab' do
+ it 'displays "Members" tab' do
+ expect(page).to have_selector('.nav-link.active', text: 'Members')
+ end
+ end
+
+ shared_examples 'active "Invited" tab' do
+ it 'displays "Invited" tab' do
+ expect(page).to have_selector('.nav-link.active', text: 'Invited')
+ end
+ end
+
+ let(:owner) { create(:user) }
+ let(:group) { create(:group) }
+
+ before do
+ stub_const('Groups::GroupMembersController::MEMBER_PER_PAGE_LIMIT', 1)
+ allow_any_instance_of(Member).to receive(:send_request).and_return(true)
+
+ group.add_owner(owner)
+ sign_in(owner)
+
+ create_list(:group_member, 2, group: group)
+ create_list(:group_member, 2, :invited, group: group)
+ create_list(:group_group_link, 2, shared_group: group)
+ create_list(:group_member, 2, :access_request, group: group)
+ end
+
+ where(:tab, :count) do
+ 'Members' | 3
+ 'Invited' | 2
+ 'Groups' | 2
+ 'Access requests' | 2
+ end
+
+ with_them do
+ it "renders #{params[:tab]} tab" do
+ visit group_group_members_path(group)
+
+ expect(page).to have_selector('.nav-link', text: "#{tab} #{count}")
+ end
+ end
+
+ context 'displays "Members" tab by default' do
+ before do
+ visit group_group_members_path(group)
+ end
+
+ it_behaves_like 'active "Members" tab'
+ end
+
+ context 'when searching "Invited"', :js do
+ before do
+ visit group_group_members_path(group)
+
+ click_link 'Invited'
+
+ page.within '[data-testid="user-search-form"]' do
+ fill_in 'search_invited', with: 'email'
+ find('button[type="submit"]').click
+ end
+ end
+
+ it_behaves_like 'active "Invited" tab'
+
+ context 'and then searching "Members"' do
+ before do
+ click_link 'Members'
+
+ page.within '[data-testid="user-search-form"]' do
+ fill_in 'search', with: 'test'
+ find('button[type="submit"]').click
+ end
+ end
+
+ it_behaves_like 'active "Members" tab'
+ end
+ end
+
+ context 'when using "Invited" pagination', :js do
+ before do
+ visit group_group_members_path(group)
+
+ click_link 'Invited'
+
+ page.within '.pagination' do
+ click_link '2'
+ end
+ end
+
+ it_behaves_like 'active "Invited" tab'
+
+ context 'and then using "Members" pagination' do
+ before do
+ click_link 'Members'
+
+ page.within '.pagination' do
+ click_link '2'
+ end
+ end
+
+ it_behaves_like 'active "Members" tab'
+ end
+ end
+end
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 06ff33ff0eb..6803b3a5785 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -45,6 +45,8 @@ RSpec.describe 'Group navbar' do
end
before do
+ insert_package_nav(_('Kubernetes'))
+
stub_feature_flags(group_push_rules: false)
stub_feature_flags(group_iterations: false)
stub_feature_flags(group_wiki: false)
@@ -62,13 +64,8 @@ RSpec.describe 'Group navbar' do
before do
stub_config(registry: { enabled: true })
- insert_after_nav_item(
- _('Kubernetes'),
- new_nav_item: {
- nav_item: _('Packages & Registries'),
- nav_sub_items: [_('Container Registry')]
- }
- )
+ insert_container_nav(_('Kubernetes'))
+
visit group_path(group)
end
diff --git a/spec/features/groups/packages_spec.rb b/spec/features/groups/packages_spec.rb
new file mode 100644
index 00000000000..d81e4aa70cf
--- /dev/null
+++ b/spec/features/groups/packages_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Group Packages' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ before do
+ sign_in(user)
+ group.add_maintainer(user)
+ end
+
+ context 'when feature is not available' do
+ context 'packages feature is disabled by config' do
+ before do
+ allow(Gitlab.config.packages).to receive(:enabled).and_return(false)
+ end
+
+ it 'gives 404' do
+ visit_group_packages
+
+ expect(page).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when feature is available', :js do
+ before do
+ visit_group_packages
+ end
+
+ it 'sidebar menu is open' do
+ sidebar = find('.nav-sidebar')
+ expect(sidebar).to have_link _('Package Registry')
+ end
+
+ context 'when there are packages' do
+ let_it_be(:second_project) { create(:project, name: 'second-project', group: group) }
+ let_it_be(:conan_package) { create(:conan_package, project: project, name: 'zzz', created_at: 1.day.ago, version: '1.0.0') }
+ let_it_be(:maven_package) { create(:maven_package, project: second_project, name: 'aaa', created_at: 2.days.ago, version: '2.0.0') }
+ let_it_be(:packages) { [conan_package, maven_package] }
+
+ it_behaves_like 'packages list', check_project_name: true
+
+ it_behaves_like 'package details link'
+
+ it 'allows you to navigate to the project page' do
+ page.within('[data-qa-selector="packages-table"]') do
+ click_link project.name
+ end
+
+ expect(page).to have_current_path(project_path(project))
+ expect(page).to have_content(project.name)
+ end
+
+ context 'sorting' do
+ it_behaves_like 'shared package sorting' do
+ let_it_be(:package_one) { maven_package }
+ let_it_be(:package_two) { conan_package }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Project' do
+ let(:packages) { [maven_package, conan_package] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Project', ascending: true do
+ let(:packages) { [conan_package, maven_package] }
+ end
+ end
+ end
+
+ it_behaves_like 'when there are no packages'
+ end
+
+ def visit_group_packages
+ visit group_packages_path(group)
+ end
+end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 8104ff3f987..8264ec2eddd 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'Group' do
expect(group.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
expect(current_path).to eq(group_path(group))
- expect(page).to have_selector '.visibility-icon .fa-globe'
+ expect(page).to have_selector '.visibility-icon [data-testid="earth-icon"]'
end
end
diff --git a/spec/features/import/manifest_import_spec.rb b/spec/features/import/manifest_import_spec.rb
index 1efbc5642d4..9c359e932d5 100644
--- a/spec/features/import/manifest_import_spec.rb
+++ b/spec/features/import/manifest_import_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Import multiple repositories by uploading a manifest file', :js
expect(page).to have_content('https://android-review.googlesource.com/platform/build/blueprint')
end
- it 'imports successfully imports a project', :sidekiq_inline do
+ it 'imports a project successfully', :sidekiq_inline, :js do
visit new_import_manifest_path
attach_file('manifest', Rails.root.join('spec/fixtures/aosp_manifest.xml'))
@@ -32,7 +32,11 @@ RSpec.describe 'Import multiple repositories by uploading a manifest file', :js
page.within(second_row) do
click_on 'Import'
+ end
+
+ wait_for_requests
+ page.within(second_row) do
expect(page).to have_content 'Done'
expect(page).to have_content("#{group.full_path}/build/blueprint")
end
@@ -48,6 +52,6 @@ RSpec.describe 'Import multiple repositories by uploading a manifest file', :js
end
def second_row
- page.all('table.import-jobs tbody tr')[1]
+ page.all('table.import-table tbody tr')[1]
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index d91fae5cdfd..e7bcd7876ea 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -48,6 +48,14 @@ RSpec.describe 'Invites', :aggregate_failures do
expect(page).to have_content('To accept this invitation, sign in')
end
+ it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
+ expect(find_field('Username or email').value).to eq(group_invite.invite_email)
+ end
+
+ it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
+ expect(find_field('Email').value).to eq(group_invite.invite_email)
+ end
+
it 'sign in, grants access and redirects to group page' do
fill_in_sign_in_form(user)
@@ -63,6 +71,8 @@ RSpec.describe 'Invites', :aggregate_failures do
it 'shows message user already a member' do
visit invite_path(group_invite.raw_invite_token)
+
+ expect(page).to have_link(owner.name, href: user_url(owner))
expect(page).to have_content('However, you are already a member of this group.')
end
end
@@ -197,8 +207,10 @@ RSpec.describe 'Invites', :aggregate_failures do
it 'declines application and redirects to dashboard' do
page.click_link 'Decline'
+
expect(current_path).to eq(dashboard_projects_path)
expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
@@ -209,7 +221,9 @@ RSpec.describe 'Invites', :aggregate_failures do
it 'declines application and redirects to sign in page' do
expect(current_path).to eq(new_user_session_path)
+
expect(page).to have_content('You have declined the invitation to join group Owned.')
+ expect { group_invite.reload }.to raise_error ActiveRecord::RecordNotFound
end
end
end
@@ -223,9 +237,13 @@ RSpec.describe 'Invites', :aggregate_failures do
end
it 'grants access and redirects to group page' do
+ expect(group.users.include?(user)).to be false
+
page.click_link 'Accept invitation'
+
expect(current_path).to eq(group_path(group))
expect(page).to have_content('You have been granted Owner access to group Owned.')
+ expect(group.users.include?(user)).to be true
end
end
end
diff --git a/spec/features/issuables/issuable_list_spec.rb b/spec/features/issuables/issuable_list_spec.rb
index 259c09b9d11..7790d8f1c4c 100644
--- a/spec/features/issuables/issuable_list_spec.rb
+++ b/spec/features/issuables/issuable_list_spec.rb
@@ -9,15 +9,13 @@ RSpec.describe 'issuable list', :js do
issuable_types = [:issue, :merge_request]
before do
- stub_feature_flags(vue_issuables_list: false)
- # something is going on
project.add_user(user, :developer)
sign_in(user)
issuable_types.each { |type| create_issuables(type) }
end
issuable_types.each do |issuable_type|
- it "avoids N+1 database queries for #{issuable_type.to_s.humanize.pluralize}" do
+ it "avoids N+1 database queries for #{issuable_type.to_s.humanize.pluralize}", quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/231426' } do
control_count = ActiveRecord::QueryRecorder.new { visit_issuable_list(issuable_type) }.count
create_issuables(issuable_type)
diff --git a/spec/features/issues/bulk_assignment_labels_spec.rb b/spec/features/issues/bulk_assignment_labels_spec.rb
index 91f0e983fa8..aa61aff3b05 100644
--- a/spec/features/issues/bulk_assignment_labels_spec.rb
+++ b/spec/features/issues/bulk_assignment_labels_spec.rb
@@ -14,9 +14,6 @@ RSpec.describe 'Issues > Labels bulk assignment' do
context 'as an allowed user', :js do
before do
- # Make sure that issuables list FF is not turned on.
- stub_feature_flags(vue_issuables_list: false)
-
project.add_maintainer(user)
sign_in user
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index 6fc648954b4..12682905559 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -8,10 +8,14 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
- def resolve_all_discussions_link_selector
- text = "Resolve all threads in new issue"
+ def resolve_all_discussions_link_selector(title: "")
url = new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
- %Q{a[title="#{text}"][href="#{url}"]}
+
+ if title.empty?
+ %Q{a[href="#{url}"]}
+ else
+ %Q{a[title="#{title}"][href="#{url}"]}
+ end
end
describe 'as a user with access to the project' do
@@ -23,7 +27,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
it 'shows a button to resolve all threads by creating a new issue' do
within('.line-resolve-all-container') do
- expect(page).to have_selector resolve_all_discussions_link_selector
+ expect(page).to have_selector resolve_all_discussions_link_selector(title: "Resolve all threads in new issue")
end
end
@@ -34,6 +38,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
it 'hides the link for creating a new issue' do
expect(page).not_to have_selector resolve_all_discussions_link_selector
+ expect(page).not_to have_content "Resolve all threads in new issue"
end
end
@@ -57,7 +62,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
end
it 'does not show a link to create a new issue' do
- expect(page).not_to have_link 'Create an issue to resolve them later'
+ expect(page).not_to have_link 'Resolve all threads in new issue'
end
end
@@ -67,18 +72,20 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
end
it 'shows a warning that the merge request contains unresolved threads' do
- expect(page).to have_content 'There are unresolved threads.'
+ expect(page).to have_content 'Before this can be merged,'
end
it 'has a link to resolve all threads by creating an issue' do
page.within '.mr-widget-body' do
- expect(page).to have_link 'Create an issue to resolve them later', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
+ expect(page).to have_link 'Resolve all threads in new issue', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
end
end
context 'creating an issue for threads' do
before do
- page.click_link 'Create an issue to resolve them later', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
+ page.within '.mr-widget-body' do
+ page.click_link 'Resolve all threads in new issue', href: new_project_issue_path(project, merge_request_to_resolve_discussions_of: merge_request.iid)
+ end
end
it_behaves_like 'creating an issue for a thread'
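A minimal, self-contained sketch of the title-optional selector helper introduced in the hunk above; the hardcoded URL below is only illustrative and stands in for new_project_issue_path(project, ...), which exists only inside the spec:

# Sketch only: mirrors resolve_all_discussions_link_selector as rewritten above,
# with a placeholder URL in place of the Rails route helper.
def resolve_all_discussions_link_selector(title: "", url: "/gitlab-org/gitlab-test/-/issues/new?merge_request_to_resolve_discussions_of=1")
  if title.empty?
    %Q{a[href="#{url}"]}
  else
    %Q{a[title="#{title}"][href="#{url}"]}
  end
end

# Without a title the selector matches on href alone; with one it also checks the title attribute.
puts resolve_all_discussions_link_selector
puts resolve_all_discussions_link_selector(title: "Resolve all threads in new issue")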
diff --git a/spec/features/issues/filtered_search/recent_searches_spec.rb b/spec/features/issues/filtered_search/recent_searches_spec.rb
index 85b7a093536..61c1e35f3c8 100644
--- a/spec/features/issues/filtered_search/recent_searches_spec.rb
+++ b/spec/features/issues/filtered_search/recent_searches_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Recent searches', :js do
include FilteredSearchHelpers
+ include MobileHelpers
let(:project_1) { create(:project, :public) }
let(:project_2) { create(:project, :public) }
@@ -104,4 +105,24 @@ RSpec.describe 'Recent searches', :js do
expect(find('.flash-alert')).to have_text('An error occurred while parsing recent searches')
end
+
+ context 'on tablet/mobile screen' do
+ it 'shows only the history icon in the dropdown' do
+ resize_screen_sm
+ visit project_issues_path(project_1)
+
+ expect(find('.filtered-search-history-dropdown-wrapper')).to have_selector('svg', visible: true)
+ expect(find('.filtered-search-history-dropdown-wrapper')).to have_selector('span', text: 'Recent searches', visible: false)
+ end
+ end
+
+ context 'on PC screen' do
+ it 'shows only the Recent searches text in the dropdown' do
+ restore_window_size
+ visit project_issues_path(project_1)
+
+ expect(find('.filtered-search-history-dropdown-wrapper')).to have_selector('svg', visible: false)
+ expect(find('.filtered-search-history-dropdown-wrapper')).to have_selector('span', text: 'Recent searches', visible: true)
+ end
+ end
end
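The spec above pulls in MobileHelpers for resize_screen_sm and restore_window_size. A hypothetical sketch of such a helper (the exact dimensions in GitLab's real module may differ), built on Capybara's current_window.resize_to:

# Hypothetical helper shape, assuming it is included in a Capybara feature spec;
# the real MobileHelpers module may use different viewport sizes.
module MobileHelpers
  def resize_screen_sm
    resize_window(900, 768)   # roughly a tablet-width viewport
  end

  def restore_window_size
    resize_window(1366, 768)  # back to a desktop-sized viewport
  end

  def resize_window(width, height)
    page.current_window.resize_to(width, height)
  end
end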
diff --git a/spec/features/issues/filtered_search/visual_tokens_spec.rb b/spec/features/issues/filtered_search/visual_tokens_spec.rb
index 59588978a8e..c585d7f6194 100644
--- a/spec/features/issues/filtered_search/visual_tokens_spec.rb
+++ b/spec/features/issues/filtered_search/visual_tokens_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe 'Visual tokens', :js do
end
it 'ends editing mode when document is clicked' do
- find('#content-body').click
+ find('.js-navbar').click
expect_filtered_search_input_empty
expect(page).to have_css('#js-dropdown-author', visible: false)
@@ -142,7 +142,7 @@ RSpec.describe 'Visual tokens', :js do
it 'does not tokenize incomplete token' do
filtered_search.send_keys('author:=')
- find('body').click
+ find('.js-navbar').click
token = page.all('.tokens-container .js-visual-token')[1]
expect_filtered_search_input_empty
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 0b2e8013304..3757985f99c 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -487,7 +487,7 @@ RSpec.describe 'GFM autocomplete', :js do
wait_for_requests
- find('.tribute-container .highlight').click
+ find('.tribute-container .highlight', visible: true).click
click_button 'Save changes'
@@ -501,30 +501,36 @@ RSpec.describe 'GFM autocomplete', :js do
find('#note-body').native.send_keys('@')
end
- expect(page).to have_selector('.tribute-container')
+ expect(page).to have_selector('.tribute-container', visible: true)
end
- it 'opens autocomplete menu for Username when field starts with text with item escaping HTML characters' do
+ it 'opens autocomplete menu for Issues when field starts with text with item escaping HTML characters' do
+ issue_xss_title = 'This will execute alert<img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;'
+ create(:issue, project: project, title: issue_xss_title)
+
page.within '.timeline-content-form' do
- find('#note-body').native.send_keys('@ev')
+ find('#note-body').native.send_keys('#')
end
wait_for_requests
- expect(page).to have_selector('.tribute-container')
+ expect(page).to have_selector('.tribute-container', visible: true)
page.within '.tribute-container ul' do
- expect(find('li').text).to have_content(user_xss.username)
+ expect(page.all('li').first.text).to include(issue_xss_title)
end
end
- it 'doesnt open autocomplete menu character is prefixed with text' do
+ it 'opens autocomplete menu for Username when field starts with text with item escaping HTML characters' do
page.within '.timeline-content-form' do
- find('#note-body').native.send_keys('testing')
- find('#note-body').native.send_keys('@')
+ find('#note-body').native.send_keys('@ev')
end
- expect(page).not_to have_selector('.tribute-container')
+ wait_for_requests
+
+ expect(page).to have_selector('.tribute-container', visible: true)
+
+ expect(find('.tribute-container ul', visible: true).text).to have_content(user_xss.username)
end
it 'selects the first item for assignee dropdowns' do
@@ -532,11 +538,11 @@ RSpec.describe 'GFM autocomplete', :js do
find('#note-body').native.send_keys('@')
end
- expect(page).to have_selector('.tribute-container')
+ expect(page).to have_selector('.tribute-container', visible: true)
wait_for_requests
- expect(find('.tribute-container ul')).to have_selector('.highlight:first-of-type')
+ expect(find('.tribute-container ul', visible: true)).to have_selector('.highlight:first-of-type')
end
it 'includes items for assignee dropdowns with non-ASCII characters in name' do
@@ -545,14 +551,26 @@ RSpec.describe 'GFM autocomplete', :js do
simulate_input('#note-body', "@#{user.name[0...8]}")
end
- expect(page).to have_selector('.tribute-container')
+ expect(page).to have_selector('.tribute-container', visible: true)
wait_for_requests
- expect(find('.tribute-container')).to have_content(user.name)
+ expect(find('.tribute-container ul', visible: true)).to have_content(user.name)
end
context 'if a selected value has special characters' do
+ it 'wraps the result in double quotes' do
+ note = find('#note-body')
+ page.within '.timeline-content-form' do
+ find('#note-body').native.send_keys('')
+ simulate_input('#note-body', "~#{label.title[0]}")
+ end
+
+ label_item = find('.tribute-container ul', text: label.title, visible: true)
+
+ expect_to_wrap(true, label_item, note, label.title)
+ end
+
it "shows dropdown after a new line" do
note = find('#note-body')
page.within '.timeline-content-form' do
@@ -562,7 +580,7 @@ RSpec.describe 'GFM autocomplete', :js do
note.native.send_keys('@')
end
- expect(page).to have_selector('.tribute-container')
+ expect(page).to have_selector('.tribute-container', visible: true)
end
it "does not show dropdown when preceded with a special character" do
@@ -571,12 +589,21 @@ RSpec.describe 'GFM autocomplete', :js do
note.native.send_keys("@")
end
- expect(page).to have_selector('.tribute-container')
+ expect(page).to have_selector('.tribute-container', visible: true)
page.within '.timeline-content-form' do
note.native.send_keys("@")
end
+ expect(page).not_to have_selector('.tribute-container')
+ end
+
+ it "does not throw an error if no labels exist" do
+ note = find('#note-body')
+ page.within '.timeline-content-form' do
+ note.native.send_keys('~')
+ end
+
expect(page).to have_selector('.tribute-container', visible: false)
end
@@ -586,7 +613,7 @@ RSpec.describe 'GFM autocomplete', :js do
note.native.send_keys("@#{user.username[0]}")
end
- user_item = find('.tribute-container li', text: user.username)
+ user_item = find('.tribute-container ul', text: user.username, visible: true)
expect_to_wrap(false, user_item, note, user.username)
end
@@ -611,7 +638,7 @@ RSpec.describe 'GFM autocomplete', :js do
wait_for_requests
- user_item = find('.tribute-container li', text: user.username)
+ user_item = find('.tribute-container ul', text: user.username, visible: true)
expect(user_item).to have_content(user.username)
end
end
@@ -640,8 +667,139 @@ RSpec.describe 'GFM autocomplete', :js do
wait_for_requests
- expect(find('.tribute-container ul')).not_to have_content(user.username)
- expect(find('.tribute-container ul')).to have_content(unassigned_user.username)
+ expect(find('.tribute-container ul', visible: true)).not_to have_content(user.username)
+ expect(find('.tribute-container ul', visible: true)).to have_content(unassigned_user.username)
+ end
+
+ it 'lists users who are currently not assigned to the issue when using /assign on the second line' do
+ visit project_issue_path(project, issue_assignee)
+
+ note = find('#note-body')
+ page.within '.timeline-content-form' do
+ note.native.send_keys('/assign @user2')
+ note.native.send_keys(:enter)
+ note.native.send_keys('/assign @')
+ note.native.send_keys(:right)
+ end
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true)).not_to have_content(user.username)
+ expect(find('.tribute-container ul', visible: true)).to have_content(unassigned_user.username)
+ end
+ end
+
+ context 'labels' do
+ it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
+ label_xss_title = 'alert label &lt;img src=x onerror="alert(\'Hello xss\');" a'
+ create(:label, project: project, title: label_xss_title)
+
+ note = find('#note-body')
+
+ # It should show all the labels on "~".
+ type(note, '~')
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true).text).to have_content('alert label')
+ end
+
+ it 'allows colons when autocompleting scoped labels' do
+ create(:label, project: project, title: 'scoped:label')
+
+ note = find('#note-body')
+ type(note, '~scoped:')
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true).text).to have_content('scoped:label')
+ end
+
+ it 'allows colons when autocompleting scoped labels with double colons' do
+ create(:label, project: project, title: 'scoped::label')
+
+ note = find('#note-body')
+ type(note, '~scoped::')
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true).text).to have_content('scoped::label')
+ end
+
+ it 'autocompletes multi-word labels' do
+ create(:label, project: project, title: 'Accepting merge requests')
+
+ note = find('#note-body')
+ type(note, '~Acceptingmerge')
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true).text).to have_content('Accepting merge requests')
+ end
+
+ it 'only autocompletes the latest label' do
+ create(:label, project: project, title: 'documentation')
+ create(:label, project: project, title: 'feature')
+
+ note = find('#note-body')
+ type(note, '~documentation foo bar ~feat')
+ note.native.send_keys(:right)
+
+ wait_for_requests
+
+ expect(find('.tribute-container ul', visible: true).text).to have_content('feature')
+ expect(find('.tribute-container ul', visible: true).text).not_to have_content('documentation')
+ end
+
+ it 'does not autocomplete labels if no tilde is typed' do
+ create(:label, project: project, title: 'documentation')
+
+ note = find('#note-body')
+ type(note, 'document')
+
+ wait_for_requests
+
+ expect(page).not_to have_selector('.tribute-container')
+ end
+ end
+
+ context 'when other notes are destroyed' do
+ let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
+
+ # This is meant to protect against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/228729
+ it 'keeps autocomplete key listeners' do
+ visit project_issue_path(project, issue)
+ note = find('#note-body')
+
+ start_comment_with_emoji(note)
+
+ start_and_cancel_discussion
+
+ note.fill_in(with: '')
+ start_comment_with_emoji(note)
+ note.native.send_keys(:enter)
+
+ expect(note.value).to eql('Hello :100: ')
+ end
+
+ def start_comment_with_emoji(note)
+ note.native.send_keys('Hello :10')
+
+ wait_for_requests
+
+ find('.atwho-view li', text: '100')
+ end
+
+ def start_and_cancel_discussion
+ click_button('Reply...')
+
+ fill_in('note_note', with: 'Whoops!')
+
+ page.accept_alert 'Are you sure you want to cancel creating this comment?' do
+ click_button('Cancel')
+ end
+
+ wait_for_requests
end
end
end
diff --git a/spec/features/issues/service_desk_spec.rb b/spec/features/issues/service_desk_spec.rb
index 0995aa11654..2912ac33625 100644
--- a/spec/features/issues/service_desk_spec.rb
+++ b/spec/features/issues/service_desk_spec.rb
@@ -7,8 +7,6 @@ RSpec.describe 'Service Desk Issue Tracker', :js do
let(:user) { create(:user) }
before do
- stub_feature_flags(vue_issuables_list: false)
-
allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true)
allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true)
@@ -78,11 +76,9 @@ RSpec.describe 'Service Desk Issue Tracker', :js do
context 'when service desk has been activated' do
context 'when there are no issues' do
describe 'service desk info content' do
- before do
+ it 'displays the large info box, documentation, and the address' do
visit service_desk_project_issues_path(project)
- end
- it 'displays the large info box, documentation, and the address' do
aggregate_failures do
expect(page).to have_css('.empty-state')
expect(page).to have_link('Read more', href: help_page_path('user/project/service_desk'))
diff --git a/spec/features/issues/update_issues_spec.rb b/spec/features/issues/update_issues_spec.rb
index dfe3a1bf1b3..eb78e4e2456 100644
--- a/spec/features/issues/update_issues_spec.rb
+++ b/spec/features/issues/update_issues_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
let!(:user) { create(:user)}
before do
- stub_feature_flags(vue_issuables_list: false)
project.add_maintainer(user)
sign_in(user)
end
@@ -52,7 +51,7 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
click_update_issues_button
page.within('.issue .controls') do
- expect(find('.author-link')["title"]).to have_content(user.name)
+ expect(find('.author-link')['href']).to have_content(user.website_url)
end
end
@@ -83,13 +82,15 @@ RSpec.describe 'Multiple issue updating from issues#index', :js do
find('.dropdown-menu-milestone a', text: milestone.title).click
click_update_issues_button
- expect(find('.issue')).to have_content milestone.title
+ expect(page.find('.issue')).to have_content milestone.title
end
it 'sets to no milestone' do
create_with_milestone
visit project_issues_path(project)
+ wait_for_requests
+
expect(first('.issue')).to have_content milestone.title
click_button 'Edit issues'
diff --git a/spec/features/issues/user_filters_issues_spec.rb b/spec/features/issues/user_filters_issues_spec.rb
index 54a600910ef..1b246181523 100644
--- a/spec/features/issues/user_filters_issues_spec.rb
+++ b/spec/features/issues/user_filters_issues_spec.rb
@@ -2,13 +2,11 @@
require 'spec_helper'
-RSpec.describe 'User filters issues' do
+RSpec.describe 'User filters issues', :js do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project_empty_repo, :public) }
before do
- stub_feature_flags(vue_issuables_list: false)
-
%w[foobar barbaz].each do |title|
create(:issue,
author: user,
diff --git a/spec/features/issues/user_sees_empty_state_spec.rb b/spec/features/issues/user_sees_empty_state_spec.rb
index e39369b0150..b43ba01606a 100644
--- a/spec/features/issues/user_sees_empty_state_spec.rb
+++ b/spec/features/issues/user_sees_empty_state_spec.rb
@@ -2,14 +2,10 @@
require 'spec_helper'
-RSpec.describe 'Issues > User sees empty state' do
+RSpec.describe 'Issues > User sees empty state', :js do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { project.creator }
- before do
- stub_feature_flags(vue_issuables_list: false)
- end
-
shared_examples_for 'empty state with filters' do
it 'user sees empty state with filters' do
create(:issue, author: user, project: project)
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index 91c6419b464..f0bb055c6f2 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -16,8 +16,6 @@ RSpec.describe "User sorts issues" do
let_it_be(:later_due_milestone) { create(:milestone, project: project, due_date: '2013-12-12') }
before do
- stub_feature_flags(vue_issuables_list: false)
-
create_list(:award_emoji, 2, :upvote, awardable: issue1)
create_list(:award_emoji, 2, :downvote, awardable: issue2)
create(:award_emoji, :downvote, awardable: issue1)
@@ -48,7 +46,7 @@ RSpec.describe "User sorts issues" do
expect(find('.issues-filters a.is-active')).to have_content('Milestone')
end
- it "sorts by popularity" do
+ it 'sorts by popularity', :js do
find('.filter-dropdown-container .dropdown').click
page.within('ul.dropdown-menu.dropdown-menu-right li') do
@@ -70,14 +68,14 @@ RSpec.describe "User sorts issues" do
end
end
- it 'sorts by newest' do
+ it 'sorts by newest', :js do
visit project_issues_path(project, sort: sort_value_created_date)
expect(first_issue).to include('foo')
expect(last_issue).to include('baz')
end
- it 'sorts by most recently updated' do
+ it 'sorts by most recently updated', :js do
issue3.updated_at = Time.now + 100
issue3.save
visit project_issues_path(project, sort: sort_value_recently_updated)
@@ -85,7 +83,7 @@ RSpec.describe "User sorts issues" do
expect(first_issue).to include('baz')
end
- describe 'sorting by due date' do
+ describe 'sorting by due date', :js do
before do
issue1.update(due_date: 1.day.from_now)
issue2.update(due_date: 6.days.from_now)
@@ -122,7 +120,7 @@ RSpec.describe "User sorts issues" do
end
end
- describe 'filtering by due date' do
+ describe 'filtering by due date', :js do
before do
issue1.update(due_date: 1.day.from_now)
issue2.update(due_date: 6.days.from_now)
@@ -205,7 +203,7 @@ RSpec.describe "User sorts issues" do
end
end
- describe 'sorting by milestone' do
+ describe 'sorting by milestone', :js do
before do
issue1.milestone = newer_due_milestone
issue1.save
@@ -221,7 +219,7 @@ RSpec.describe "User sorts issues" do
end
end
- describe 'combine filter and sort' do
+ describe 'combine filter and sort', :js do
let(:user2) { create(:user) }
before do
diff --git a/spec/features/issues/user_views_issues_spec.rb b/spec/features/issues/user_views_issues_spec.rb
index 34cea7f3b0b..165f4b10cff 100644
--- a/spec/features/issues/user_views_issues_spec.rb
+++ b/spec/features/issues/user_views_issues_spec.rb
@@ -10,10 +10,6 @@ RSpec.describe "User views issues" do
let_it_be(:user) { create(:user) }
- before do
- stub_feature_flags(vue_issuables_list: false)
- end
-
shared_examples "opens issue from list" do
it "opens issue" do
click_link(issue.title)
@@ -112,7 +108,7 @@ RSpec.describe "User views issues" do
end
end
- context "when signed in as developer" do
+ context "when signed in as developer", :js do
before do
project.add_developer(user)
sign_in(user)
@@ -122,27 +118,7 @@ RSpec.describe "User views issues" do
include_examples "internal project"
end
- context "when not signed in" do
+ context "when not signed in", :js do
include_examples "public project"
end
-
- context 'when vue_issuables_list feature is enabled', :js do
- before do
- stub_feature_flags(vue_issuables_list: true)
- end
-
- context 'when signed in' do
- before do
- project.add_developer(user)
- sign_in(user)
- end
-
- include_examples "public project"
- include_examples "internal project"
- end
-
- context 'when not signed in' do
- include_examples "public project"
- end
- end
end
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index 80dcdd08f74..57362ed2d54 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -737,6 +737,7 @@ RSpec.describe 'Copy as GFM', :js do
context 'inline diff' do
before do
visit project_commit_path(project, sample_commit.id, view: 'inline')
+ wait_for_requests
end
it_behaves_like 'copying code from a diff'
@@ -745,6 +746,7 @@ RSpec.describe 'Copy as GFM', :js do
context 'parallel diff' do
before do
visit project_commit_path(project, sample_commit.id, view: 'parallel')
+ wait_for_requests
end
it_behaves_like 'copying code from a diff'
diff --git a/spec/features/markdown/metrics_spec.rb b/spec/features/markdown/metrics_spec.rb
index 3e63ae67f19..9716c660fa9 100644
--- a/spec/features/markdown/metrics_spec.rb
+++ b/spec/features/markdown/metrics_spec.rb
@@ -18,10 +18,7 @@ RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_st
before do
clear_host_from_memoized_variables
-
- allow(::Gitlab.config.gitlab)
- .to receive(:url)
- .and_return(urls.root_url.chomp('/'))
+ stub_gitlab_domain
project.add_developer(user)
sign_in(user)
@@ -86,6 +83,7 @@ RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_st
y_label: 'Total Cores'
}
end
+
let(:metrics_url_2) { urls.metrics_project_environment_url(project, environment, **chart_params_2) }
let(:description) { "See [metrics dashboard](#{metrics_url}) for info. \n See [metrics dashboard](#{metrics_url_2}) for info." }
let(:issue) { create(:issue, project: project, description: description) }
@@ -144,11 +142,11 @@ RSpec.describe 'Metrics rendering', :js, :kubeclient, :use_clean_rails_memory_st
{
panel_groups: [{
panels: [{
- type: "line-graph",
+ type: 'area-chart',
title: title,
- y_label: "metric",
+ y_label: 'metric',
metrics: [{
- query_range: "metric * 0.5 < 1"
+ query_range: 'metric * 0.5 < 1'
}]
}]
}]
diff --git a/spec/features/merge_request/user_approves_spec.rb b/spec/features/merge_request/user_approves_spec.rb
index d319fdcb87b..f401dd598f3 100644
--- a/spec/features/merge_request/user_approves_spec.rb
+++ b/spec/features/merge_request/user_approves_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Merge request > User approves', :js do
verify_approvals_count_on_index!
click_approval_button('Revoke approval')
- expect(page).to have_content('No approval required; you can still approve')
+ expect(page).to have_content('Approval is optional')
end
def verify_approvals_count_on_index!
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index 30bf82e3665..3a199951b56 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'User comments on a diff', :js do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
+
let(:user) { create(:user) }
before do
diff --git a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
index 23df7635aa1..1d3d76d3486 100644
--- a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
+++ b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Merge request < User customizes merge commit message', :js do
description: "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}"
)
end
+
let(:textbox) { page.find(:css, '#merge-message-edit', visible: false) }
let(:default_message) do
[
@@ -24,6 +25,7 @@ RSpec.describe 'Merge request < User customizes merge commit message', :js do
"See merge request #{merge_request.to_reference(full: true)}"
].join("\n\n")
end
+
let(:message_with_description) do
[
"Merge branch 'feature' into 'master'",
diff --git a/spec/features/merge_request/user_edits_merge_request_spec.rb b/spec/features/merge_request/user_edits_merge_request_spec.rb
index 6c5f508c8c6..364af8d8a76 100644
--- a/spec/features/merge_request/user_edits_merge_request_spec.rb
+++ b/spec/features/merge_request/user_edits_merge_request_spec.rb
@@ -85,13 +85,24 @@ RSpec.describe 'User edits a merge request', :js do
end
end
- it 'changes the target branch' do
- expect(page).to have_content('From master into feature')
+ describe 'changing target branch' do
+ it 'allows user to change target branch' do
+ expect(page).to have_content('From master into feature')
- select2('merge-test', from: '#merge_request_target_branch')
- click_button('Save changes')
+ select2('merge-test', from: '#merge_request_target_branch')
+ click_button('Save changes')
+
+ expect(page).to have_content("Request to merge #{merge_request.source_branch} into merge-test")
+ expect(page).to have_content("changed target branch from #{merge_request.target_branch} to merge-test")
+ end
- expect(page).to have_content("Request to merge #{merge_request.source_branch} into merge-test")
- expect(page).to have_content("changed target branch from #{merge_request.target_branch} to merge-test")
+ describe 'merged merge request' do
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project, state: :merged) }
+
+ it 'does not allow user to change target branch' do
+ expect(page).to have_content('From master into feature')
+ expect(page).not_to have_selector('.select2-container')
+ end
+ end
end
end
diff --git a/spec/features/merge_request/user_jumps_to_discussion_spec.rb b/spec/features/merge_request/user_jumps_to_discussion_spec.rb
new file mode 100644
index 00000000000..9bded1c5572
--- /dev/null
+++ b/spec/features/merge_request/user_jumps_to_discussion_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User jumps to the next unresolved discussion', :js do
+ let(:project) { create(:project, :repository) }
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
+ end
+
+ let(:user) { create(:user) }
+
+ before do
+ create(:discussion_note, noteable: merge_request, project: project, author: user)
+
+ project.add_maintainer(user)
+ sign_in(user)
+
+ visit(diffs_project_merge_request_path(project, merge_request))
+
+ wait_for_requests
+ end
+
+ it 'jumps to overview tab' do
+ find('.discussion-next-btn').click
+
+ expect(page).to have_css('.notes-tab.active')
+ end
+end
diff --git a/spec/features/merge_request/user_merges_immediately_spec.rb b/spec/features/merge_request/user_merges_immediately_spec.rb
index 47dc09ae79f..0fb081ec507 100644
--- a/spec/features/merge_request/user_merges_immediately_spec.rb
+++ b/spec/features/merge_request/user_merges_immediately_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'Merge requests > User merges immediately', :js do
head_pipeline: pipeline,
source_branch: pipeline.ref)
end
+
let(:pipeline) do
create(:ci_pipeline, project: project,
ref: 'master',
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index d5ff31de073..3dc49fb4dea 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
title: 'Bug NS-04',
merge_params: { force_remove_source_branch: '1' })
end
+
let(:pipeline) do
create(:ci_pipeline, project: project,
sha: merge_request.diff_head_sha,
@@ -115,6 +116,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
merge_user: user,
title: 'MepMep')
end
+
let!(:build) do
create(:ci_build, pipeline: pipeline)
end
@@ -154,7 +156,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
- merge_request.update(
+ merge_request.update!(
merge_user: merge_request.author,
merge_error: 'Something went wrong.'
)
@@ -173,7 +175,7 @@ RSpec.describe 'Merge request > User merges when pipeline succeeds', :js do
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
- merge_request.update(
+ merge_request.update!(
merge_user: merge_request.author,
merge_error: 'Something went wrong.'
)
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index 6ecffb05009..9556142ecb8 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
context 'when the MR only supports legacy diff notes' do
before do
- merge_request.merge_request_diff.update(start_commit_sha: nil)
+ merge_request.merge_request_diff.update!(start_commit_sha: nil)
visit diffs_project_merge_request_path(project, merge_request, view: 'inline')
end
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 3c70819319d..4c079b98c90 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'Merge request > User posts notes', :js do
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project)
end
+
let!(:note) do
create(:note_on_merge_request, :with_attachment, noteable: merge_request,
project: project)
@@ -94,20 +95,31 @@ RSpec.describe 'Merge request > User posts notes', :js do
end
end
- describe 'reply on a deleted conversation' do
- before do
- visit project_merge_request_path(project, merge_request)
- end
-
- it 'shows an error message' do
+ describe 'replying to a comment' do
+ it 'makes the discussion resolvable' do
find('.js-reply-button').click
- note.delete
page.within('.discussion-reply-holder') do
fill_in 'note[note]', with: 'A reply'
click_button 'Add comment now'
- wait_for_requests
- expect(page).to have_content('Your comment could not be submitted because discussion to reply to cannot be found')
+
+ expect(page).to have_button('Resolve thread')
+ end
+ end
+
+ context 'when comment is deleted' do
+ it 'shows an error message' do
+ find('.js-reply-button').click
+
+ page.within('.discussion-reply-holder') do
+ fill_in 'note[note]', with: 'A reply'
+
+ note.delete
+
+ click_button 'Add comment now'
+
+ expect(page).to have_content('Your comment could not be submitted because discussion to reply to cannot be found')
+ end
end
end
end
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index aa3840b4376..f2adfd21e49 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
before do
project.add_maintainer(user)
sign_in(user)
- note.destroy
+ note.destroy!
visit_merge_request
end
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
index 34a3490a152..a9d4c4df507 100644
--- a/spec/features/merge_request/user_resolves_wip_mr_spec.rb
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'Merge request > User resolves Work in Progress', :js do
title: 'WIP: Bug NS-04',
merge_params: { force_remove_source_branch: '1' })
end
+
let(:pipeline) do
create(:ci_pipeline, project: project,
sha: merge_request.diff_head_sha,
@@ -32,9 +33,9 @@ RSpec.describe 'Merge request > User resolves Work in Progress', :js do
it 'retains merge request data after clicking Resolve WIP status' do
expect(page.find('.ci-widget-content')).to have_content("Pipeline ##{pipeline.id}")
- expect(page).to have_content "This is a Work in Progress"
+ expect(page).to have_content "This merge request is still a work in progress."
- click_button('Resolve WIP status')
+ click_button('Mark as ready')
wait_for_requests
@@ -42,7 +43,7 @@ RSpec.describe 'Merge request > User resolves Work in Progress', :js do
# merge request widget refreshes, which masks missing elements
# that should already be present.
expect(page.find('.ci-widget-content', wait: 0)).to have_content("Pipeline ##{pipeline.id}")
- expect(page).not_to have_content('This is a Work in Progress')
+ expect(page).not_to have_content('This merge request is still a work in progress.')
end
end
end
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 415e6b29d5a..7fad805866b 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Merge request > User sees avatars on diff notes', :js do
diff_refs: merge_request.diff_refs
)
end
+
let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: position) }
before do
diff --git a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
index ec2fb856be5..7f4249336fe 100644
--- a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'Merge request > User cherry-picks', :js do
context 'without a merge commit' do
before do
merge_request.merge_commit_sha = nil
- merge_request.save
+ merge_request.save!
end
it 'does not show a Cherry-pick button' do
diff --git a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
index baef547a480..d6cdc15005b 100644
--- a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
+++ b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe 'Merge request > User sees closing issues message', :js do
title: merge_request_title
)
end
+
let(:merge_request_description) { 'Merge Request Description' }
let(:merge_request_title) { 'Merge Request Title' }
diff --git a/spec/features/merge_request/user_sees_discussions_spec.rb b/spec/features/merge_request/user_sees_discussions_spec.rb
index ca8c4f84677..289c861739f 100644
--- a/spec/features/merge_request/user_sees_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe 'Merge request > User sees threads', :js do
end
describe "Diff discussions" do
- let!(:old_merge_request_diff) { merge_request.merge_request_diffs.create(diff_refs: outdated_diff_refs) }
- let!(:new_merge_request_diff) { merge_request.merge_request_diffs.create }
+ let!(:old_merge_request_diff) { merge_request.merge_request_diffs.create!(diff_refs: outdated_diff_refs) }
+ let!(:new_merge_request_diff) { merge_request.merge_request_diffs.create! }
let!(:outdated_discussion) { create(:diff_note_on_merge_request, project: project, noteable: merge_request, position: outdated_position).to_discussion }
let!(:active_discussion) { create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion }
let(:outdated_position) do
@@ -24,6 +24,7 @@ RSpec.describe 'Merge request > User sees threads', :js do
diff_refs: outdated_diff_refs
)
end
+
let(:outdated_diff_refs) { project.commit("874797c3a73b60d2187ed6e2fcabd289ff75171e").diff_refs }
before do
diff --git a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
index cae04dd1693..ac38b2b854c 100644
--- a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'Merge request > User sees merge button depending on unresolved t
context 'with unresolved threads' do
it 'does not allow to merge' do
expect(page).not_to have_button 'Merge'
- expect(page).to have_content('There are unresolved threads.')
+ expect(page).to have_content('Before this can be merged,')
end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index e2aa10d80dd..7b319f6aff8 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -8,7 +8,6 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
- let(:enable_mr_tabs_position_flag) { true }
let(:config) do
{
@@ -27,7 +26,6 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
before do
- stub_feature_flags(mr_tabs_position: enable_mr_tabs_position_flag)
stub_application_setting(auto_devops_enabled: false)
stub_feature_flags(ci_merge_request_pipeline: true)
stub_ci_pipeline_yaml_file(YAML.dump(config))
@@ -36,7 +34,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
context 'when a user created a merge request in the parent project' do
- let(:merge_request) do
+ let!(:merge_request) do
create(:merge_request,
source_project: project,
target_project: project,
@@ -53,7 +51,6 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
Ci::CreatePipelineService.new(project, user, ref: 'feature')
.execute(:merge_request_event, merge_request: merge_request)
end
- let(:enable_mr_tabs_position_flag) { false }
before do
visit project_merge_request_path(project, merge_request)
@@ -70,23 +67,11 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
end
- context 'when merge request tabs feature flag is disabled' do
- it 'sees the latest detached merge request pipeline as the head pipeline', :sidekiq_might_not_need_inline do
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline.id}")
- end
- end
- end
-
- context 'when merge request tabs feature flag is enabled' do
- let(:enable_mr_tabs_position_flag) { true }
+ it 'sees the latest detached merge request pipeline as the head pipeline', :sidekiq_might_not_need_inline do
+ click_link "Overview"
- it 'sees the latest detached merge request pipeline as the head pipeline', :sidekiq_might_not_need_inline do
- click_link "Overview"
-
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline.id}")
- end
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -144,6 +129,8 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
it 'sees the latest detached merge request pipeline as the head pipeline' do
+ click_link 'Overview'
+
page.within('.ci-widget-content') do
expect(page).to have_content("##{detached_merge_request_pipeline_2.id}")
end
@@ -152,6 +139,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when a user merges a merge request in the parent project', :sidekiq_might_not_need_inline do
before do
+ click_link 'Overview'
click_button 'Merge when pipeline succeeds'
wait_for_requests
@@ -179,6 +167,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when branch pipeline succeeds' do
before do
+ click_link 'Overview'
push_pipeline.succeed!
wait_for_requests
@@ -214,6 +203,8 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
it 'sees the latest branch pipeline as the head pipeline', :sidekiq_might_not_need_inline do
+ click_link 'Overview'
+
page.within('.ci-widget-content') do
expect(page).to have_content("##{push_pipeline.id}")
end
@@ -260,23 +251,11 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
end
- context 'when merge request tabs feature flag is enabled' do
- it 'sees the latest detached merge request pipeline as the head pipeline' do
- click_link "Overview"
+ it 'sees the latest detached merge request pipeline as the head pipeline' do
+ click_link "Overview"
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline.id}")
- end
- end
- end
-
- context 'when merge request tabs feature flag is disabled' do
- let(:enable_mr_tabs_position_flag) { false }
-
- it 'sees the latest detached merge request pipeline as the head pipeline' do
- page.within('.ci-widget-content') do
- expect(page).to have_content("##{detached_merge_request_pipeline.id}")
- end
+ page.within('.ci-widget-content') do
+ expect(page).to have_content("##{detached_merge_request_pipeline.id}")
end
end
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index ce49e9f4141..c7d26dfc814 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -302,7 +302,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
- merge_request.update(
+ merge_request.update!(
auto_merge_enabled: true,
auto_merge_strategy: AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS,
merge_user: merge_request.author,
@@ -324,7 +324,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
- merge_request.update(
+ merge_request.update!(
merge_when_pipeline_succeeds: true,
merge_user: merge_request.author,
merge_error: 'Something went wrong'
@@ -345,9 +345,9 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
context 'view merge request where fast-forward merge is not possible' do
before do
- project.update(merge_requests_ff_only_enabled: true)
+ project.update!(merge_requests_ff_only_enabled: true)
- merge_request.update(
+ merge_request.update!(
merge_user: merge_request.author,
merge_status: :cannot_be_merged
)
@@ -380,19 +380,19 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
end
end
- context 'user can merge into source project but cannot push to fork', :js do
- let(:fork_project) { create(:project, :public, :repository) }
+ context 'user can merge into target project but cannot push to fork', :js do
+ let(:forked_project) { fork_project(project, nil, repository: true) }
let(:user2) { create(:user) }
before do
project.add_maintainer(user2)
sign_out(:user)
sign_in(user2)
- merge_request.update(target_project: fork_project)
+ merge_request.update!(source_project: forked_project)
visit project_merge_request_path(project, merge_request)
end
- it 'user can merge into the source project' do
+ it 'user can merge into the target project', :sidekiq_inline do
expect(page).to have_button('Merge', disabled: false)
end
@@ -409,7 +409,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
project.add_developer(user2)
sign_out(:user)
sign_in(user2)
- merge_request.update(
+ merge_request.update!(
source_project: forked_project,
target_project: project,
merge_params: { 'force_remove_source_branch' => '1' }
@@ -879,7 +879,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
let!(:pipeline) { create(:ci_pipeline, status: 'success', sha: sha, project: project, ref: merge_request.source_branch) }
before do
- project.update(
+ project.update!(
visibility_level: Gitlab::VisibilityLevel::PUBLIC,
public_builds: false
)
diff --git a/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
index 56092da5136..a9fefc89d6c 100644
--- a/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_from_forked_project_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'Merge request > User sees pipelines from forked project', :js do
target_project: target_project,
description: 'Test merge request')
end
+
let(:pipeline) do
create(:ci_pipeline,
project: forked_project,
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index d693eec91c8..5d41e49c478 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -123,14 +123,24 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
context 'when actor is a developer in parent project' do
let(:actor) { developer_in_parent }
- it 'creates a pipeline in the parent project' do
+ it 'creates a pipeline in the parent project when user proceeds with the warning' do
visit project_merge_request_path(parent_project, merge_request)
create_merge_request_pipeline
+ act_on_security_warning(action: 'Run Pipeline')
check_pipeline(expected_project: parent_project)
check_head_pipeline(expected_project: parent_project)
end
+
+ it 'does not create a pipeline in the parent project when user cancels the action' do
+ visit project_merge_request_path(parent_project, merge_request)
+
+ create_merge_request_pipeline
+ act_on_security_warning(action: 'Cancel')
+
+ check_no_pipelines
+ end
end
context 'when actor is a developer in fork project' do
@@ -187,6 +197,19 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
expect(page.find('.pipeline-id')[:href]).to include(expected_project.full_path)
end
end
+
+ def act_on_security_warning(action:)
+ page.within('#create-pipeline-for-fork-merge-request-modal') do
+ expect(page).to have_content('Are you sure you want to run this pipeline?')
+ click_button(action)
+ end
+ end
+
+ def check_no_pipelines
+ page.within('.ci-table') do
+ expect(page).to have_selector('.commit', count: 1)
+ end
+ end
end
describe 'race condition' do
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 75319c8a22d..60e054ddbee 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -5,14 +5,15 @@ require 'spec_helper'
RSpec.describe 'Merge request > User sees versions', :js do
let(:merge_request) do
create(:merge_request).tap do |mr|
- mr.merge_request_diff.destroy
+ mr.merge_request_diff.destroy!
end
end
+
let(:project) { merge_request.source_project }
let(:user) { project.creator }
- let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
- let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) }
- let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
+ let!(:merge_request_diff1) { merge_request.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
+ let!(:merge_request_diff2) { merge_request.merge_request_diffs.create!(head_commit_sha: nil) }
+ let!(:merge_request_diff3) { merge_request.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
let!(:params) { {} }
before do
diff --git a/spec/features/merge_request/user_sees_wip_help_message_spec.rb b/spec/features/merge_request/user_sees_wip_help_message_spec.rb
index 42fe18cfc93..204df5b3995 100644
--- a/spec/features/merge_request/user_sees_wip_help_message_spec.rb
+++ b/spec/features/merge_request/user_sees_wip_help_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees WIP help message' do
+RSpec.describe 'Merge request > User sees draft help message' do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
@@ -11,8 +11,8 @@ RSpec.describe 'Merge request > User sees WIP help message' do
sign_in(user)
end
- context 'with WIP commits' do
- it 'shows a specific WIP hint' do
+ context 'with draft commits' do
+ it 'shows a specific draft hint' do
visit project_new_merge_request_path(
project,
merge_request: {
@@ -24,14 +24,14 @@ RSpec.describe 'Merge request > User sees WIP help message' do
within_wip_explanation do
expect(page).to have_text(
- 'It looks like you have some WIP commits in this branch'
+ 'It looks like you have some draft commits in this branch'
)
end
end
end
- context 'without WIP commits' do
- it 'shows the regular WIP message' do
+ context 'without draft commits' do
+ it 'shows the regular draft message' do
visit project_new_merge_request_path(
project,
merge_request: {
@@ -43,11 +43,11 @@ RSpec.describe 'Merge request > User sees WIP help message' do
within_wip_explanation do
expect(page).not_to have_text(
- 'It looks like you have some WIP commits in this branch'
+ 'It looks like you have some draft commits in this branch'
)
expect(page).to have_text(
- "Start the title with WIP: to prevent a Work In Progress merge \
-request from being merged before it's ready"
+ "Start the title with Draft: or WIP: to prevent a merge request that is a \
+work in progress from being merged before it's ready."
)
end
end
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 0506d190487..39495832547 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe 'User comments on a diff', :js do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
+
let(:user) { create(:user) }
before do
diff --git a/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb b/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
index b864cb55785..96a1cd81c93 100644
--- a/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
+++ b/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe 'Merge Request > User tries to access private project information
name: 'nothing to see here',
repository_access_level: ProjectFeature::PRIVATE)
end
+
let(:owned_project) do
create(:project, :public, :repository,
namespace: current_user.namespace,
diff --git a/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb b/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb
new file mode 100644
index 00000000000..20a5910e66d
--- /dev/null
+++ b/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User views diffs file-by-file', :js do
+ let(:merge_request) do
+ create(:merge_request, source_branch: 'squash-large-files', source_project: project, target_project: project)
+ end
+
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user, view_diffs_file_by_file: true) }
+
+ before do
+ allow(Gitlab::Git::Diff).to receive(:size_limit).and_return(100.kilobytes)
+ allow(Gitlab::Git::Diff).to receive(:collapse_limit).and_return(10.kilobytes)
+
+ project.add_developer(user)
+
+ sign_in(user)
+
+ visit(diffs_project_merge_request_path(project, merge_request, anchor: '5091f7b9dd6202e37eaedd73d7b75d82f25fdb61'))
+
+ wait_for_requests
+ end
+
+ it 'shows diffs file-by-file' do
+ page.within('#diffs') do
+ expect(page).not_to have_content('This diff is collapsed')
+
+ click_button 'Next'
+
+ expect(page).not_to have_content('This diff is collapsed')
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb b/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
index c254a142349..abb313cb529 100644
--- a/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'User views diffs file-by-file', :js do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
+
let(:project) { create(:project, :repository) }
let(:user) { create(:user, view_diffs_file_by_file: true) }
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index 14d10fc1c9f..537c0473fa4 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'User views diffs', :js do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
+
let(:project) { create(:project, :public, :repository) }
let(:view) { 'inline' }
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index df94fe2cbd0..179bf84a729 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -37,6 +37,15 @@ RSpec.describe 'Merge requests > User mass updates', :js do
expect(page).to have_selector('.merge-request', count: 0)
end
end
+
+ it 'does not exist in merged state' do
+ merge_request.close
+ visit project_merge_requests_path(project, state: 'merged')
+
+ click_button 'Edit merge requests'
+
+ expect(page).not_to have_css('.js-issue-status')
+ end
end
context 'assignee' do
@@ -86,7 +95,7 @@ RSpec.describe 'Merge requests > User mass updates', :js do
describe 'unset milestone' do
before do
merge_request.milestone = milestone
- merge_request.save
+ merge_request.save!
visit project_merge_requests_path(project)
end
diff --git a/spec/features/merge_requests/user_views_diffs_commit_spec.rb b/spec/features/merge_requests/user_views_diffs_commit_spec.rb
new file mode 100644
index 00000000000..fcaabf9b0e7
--- /dev/null
+++ b/spec/features/merge_requests/user_views_diffs_commit_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User views diff by commit', :js do
+ let(:merge_request) do
+ create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
+ end
+
+ let(:project) { create(:project, :public, :repository) }
+
+ before do
+ stub_feature_flags(diffs_batch_load: false)
+ visit(diffs_project_merge_request_path(project, merge_request, commit_id: merge_request.diff_head_sha))
+ end
+
+ it 'shows full commit description by default' do
+ expect(page).to have_selector('.commit-row-description', visible: true)
+ end
+end
diff --git a/spec/features/populate_new_pipeline_vars_with_params_spec.rb b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
index f931e8497fc..37fea5331a3 100644
--- a/spec/features/populate_new_pipeline_vars_with_params_spec.rb
+++ b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
let(:page_path) { new_project_pipeline_path(project) }
before do
+ stub_feature_flags(new_pipeline_form: false)
sign_in(user)
project.add_maintainer(user)
diff --git a/spec/features/profiles/chat_names_spec.rb b/spec/features/profiles/chat_names_spec.rb
index 80b36aa37b8..ca888018cad 100644
--- a/spec/features/profiles/chat_names_spec.rb
+++ b/spec/features/profiles/chat_names_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe 'Profile > Chat' do
let(:params) do
{ team_id: 'T00', team_domain: 'my_chat_team', user_id: 'U01', user_name: 'my_chat_user' }
end
+
let!(:authorize_url) { ChatNames::AuthorizeUserService.new(service, params).execute }
let(:authorize_path) { URI.parse(authorize_url).request_uri }
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index a274f2b6d96..039966080d8 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe 'Profile > Password' do
fill_passwords('mypassword', 'mypassword')
page.within('.flash-notice') do
- expect(page).to have_content('Password was successfully updated. Please login with it')
+ expect(page).to have_content('Password was successfully updated. Please sign in again.')
end
end
end
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index 21a0d01a9bf..4438831fb76 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -100,14 +100,11 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
context "when revocation fails" do
it "displays an error message" do
visit profile_personal_access_tokens_path
- allow_any_instance_of(PersonalAccessToken).to receive(:update!).and_return(false)
-
- errors = ActiveModel::Errors.new(PersonalAccessToken.new).tap { |e| e.add(:name, "cannot be nil") }
- allow_any_instance_of(PersonalAccessToken).to receive(:errors).and_return(errors)
+ allow_any_instance_of(PersonalAccessTokens::RevokeService).to receive(:revocation_permitted?).and_return(false)
accept_confirm { click_on "Revoke" }
expect(active_personal_access_tokens).to have_text(personal_access_token.name)
- expect(page).to have_content("Could not revoke")
+ expect(page).to have_content("Not permitted to revoke")
end
end
end
diff --git a/spec/features/profiles/user_edit_preferences_spec.rb b/spec/features/profiles/user_edit_preferences_spec.rb
index 817228edca7..d489d92c524 100644
--- a/spec/features/profiles/user_edit_preferences_spec.rb
+++ b/spec/features/profiles/user_edit_preferences_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe 'User edit preferences profile' do
expect(page).to have_select(
'user[preferred_language]',
selected: 'Spanish - español',
- options: Gitlab::I18n::AVAILABLE_LANGUAGES.values,
+ options: Gitlab::I18n.selectable_locales.values,
visible: :all
)
end
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index 2747b5894dc..56db7efff51 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -76,13 +76,13 @@ RSpec.describe 'User visits the profile preferences page' do
it 'updates their preference' do
wait_for_requests
- select2('eo', from: '#user_preferred_language')
+ select2('pt_BR', from: '#user_preferred_language')
click_button 'Save'
wait_for_requests
refresh
- expect(page).to have_css('html[lang="eo"]')
+ expect(page).to have_css('html[lang="pt-BR"]')
end
end
diff --git a/spec/features/projects/badges/coverage_spec.rb b/spec/features/projects/badges/coverage_spec.rb
index 4c144037acd..1760ec880bc 100644
--- a/spec/features/projects/badges/coverage_spec.rb
+++ b/spec/features/projects/badges/coverage_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe 'test coverage badge' do
create(:ci_pipeline, opts).tap do |pipeline|
yield pipeline
- pipeline.update_legacy_status
+ ::Ci::ProcessPipelineService.new(pipeline).execute
end
end
diff --git a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
index 6bd6634822c..a65a82fab43 100644
--- a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
+++ b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
visit_blob
find("##{ending_fragment}").hover
- find("##{ending_fragment} i").click
+ find("##{ending_fragment} svg").click
expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
end
@@ -100,7 +100,7 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
visit_blob
find("##{ending_fragment}").hover
- find("##{ending_fragment} i").click
+ find("##{ending_fragment} svg").click
expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
end
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index f3845bb8dec..ba063acbe70 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -21,32 +21,48 @@ RSpec.describe 'CI Lint', :js do
end
describe 'YAML parsing' do
- before do
- click_on 'Validate'
- end
+ shared_examples 'validates the YAML' do
+ before do
+ click_on 'Validate'
+ end
- context 'YAML is correct' do
- let(:yaml_content) do
- File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ context 'YAML is correct' do
+ let(:yaml_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml'))
+ end
+
+ it 'parses Yaml and displays the jobs' do
+ expect(page).to have_content('Status: syntax is correct')
+
+ within "table" do
+ aggregate_failures do
+ expect(page).to have_content('Job - rspec')
+ expect(page).to have_content('Job - spinach')
+ expect(page).to have_content('Deploy Job - staging')
+ expect(page).to have_content('Deploy Job - production')
+ end
+ end
+ end
end
- it 'parses Yaml' do
- within "table" do
- expect(page).to have_content('Job - rspec')
- expect(page).to have_content('Job - spinach')
- expect(page).to have_content('Deploy Job - staging')
- expect(page).to have_content('Deploy Job - production')
+ context 'YAML is incorrect' do
+ let(:yaml_content) { 'value: cannot have :' }
+
+ it 'displays information about an error' do
+ expect(page).to have_content('Status: syntax is incorrect')
+ expect(page).to have_selector('.ace_content', text: yaml_content)
end
end
end
- context 'YAML is incorrect' do
- let(:yaml_content) { 'value: cannot have :' }
+ it_behaves_like 'validates the YAML'
- it 'displays information about an error' do
- expect(page).to have_content('Status: syntax is incorrect')
- expect(page).to have_selector('.ace_content', text: yaml_content)
+ context 'when Dry Run is checked' do
+ before do
+ check 'Simulate a pipeline created for the default branch'
end
+
+ it_behaves_like 'validates the YAML'
end
describe 'YAML revalidate' do
diff --git a/spec/features/projects/classification_label_on_project_pages_spec.rb b/spec/features/projects/classification_label_on_project_pages_spec.rb
index 0f07ca7635b..9522e5ce2cf 100644
--- a/spec/features/projects/classification_label_on_project_pages_spec.rb
+++ b/spec/features/projects/classification_label_on_project_pages_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe 'Classification label on project pages' do
let(:project) do
create(:project, external_authorization_classification_label: 'authorized label')
end
+
let(:user) { create(:user) }
before do
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index 2e6a366f77a..63e5546b43c 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -205,11 +205,10 @@ RSpec.describe 'Gcp Cluster', :js, :do_not_mock_admin_mode do
let(:admin) { create(:admin) }
before do
- stub_feature_flags(instance_level_integrations: false)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
- visit integrations_admin_application_settings_path
+ visit general_admin_application_settings_path
end
it 'user does not see the offer' do
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 15fed0c2727..450eaa7f004 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -73,6 +73,7 @@ RSpec.describe 'User Cluster', :js do
end
it 'user sees a cluster details page' do
+ expect(page).to have_content('GitLab Integration')
expect(page).to have_button('Save changes')
end
diff --git a/spec/features/projects/commit/diff_notes_spec.rb b/spec/features/projects/commit/diff_notes_spec.rb
index ff86047d812..6cebff1cc9a 100644
--- a/spec/features/projects/commit/diff_notes_spec.rb
+++ b/spec/features/projects/commit/diff_notes_spec.rb
@@ -8,30 +8,35 @@ RSpec.describe 'Commit diff', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
- before do
- project.add_maintainer(user)
- sign_in user
+ using RSpec::Parameterized::TableSyntax
+
+ where(:view, :async_diff_file_loading) do
+ 'inline' | true
+ 'inline' | false
+ 'parallel' | true
+ 'parallel' | false
end
- %w(inline parallel).each do |view|
- context "#{view} view" do
- before do
- visit project_commit_path(project, sample_commit.id, view: view)
- end
+ with_them do
+ before do
+ stub_feature_flags(async_commit_diff_files: async_diff_file_loading)
+ project.add_maintainer(user)
+ sign_in user
+ visit project_commit_path(project, sample_commit.id, view: view)
+ end
- it "adds comment to diff" do
- diff_line_num = first('.diff-line-num.new')
+ it "adds comment to diff" do
+ diff_line_num = first('.diff-line-num.new')
- diff_line_num.hover
- diff_line_num.find('.js-add-diff-note-button').click
+ diff_line_num.hover
+ diff_line_num.find('.js-add-diff-note-button').click
- page.within(first('.diff-viewer')) do
- find('.js-note-text').set 'test comment'
+ page.within(first('.diff-viewer')) do
+ find('.js-note-text').set 'test comment'
- click_button 'Comment'
+ click_button 'Comment'
- expect(page).to have_content('test comment')
- end
+ expect(page).to have_content('test comment')
end
end
end
diff --git a/spec/features/projects/commit/mini_pipeline_graph_spec.rb b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
index 9349f36282d..7bd3bce85d5 100644
--- a/spec/features/projects/commit/mini_pipeline_graph_spec.rb
+++ b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js do
ref: project.default_branch,
sha: project.commit.sha)
end
+
let(:build) { create(:ci_build, pipeline: pipeline) }
it 'display icon with status' do
@@ -25,6 +26,8 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js do
build.run
visit project_commit_path(project, project.commit.id)
+ wait_for_all_requests
+
expect(page).to have_selector('.mr-widget-pipeline-graph')
first('.mini-pipeline-graph-dropdown-toggle').click
diff --git a/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb b/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
index 71405cf917d..89ff2f4b26d 100644
--- a/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
+++ b/spec/features/projects/commit/user_views_user_status_on_commit_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Project > Commit > View user status' do
end
end
- describe 'status for a diff note on the commit' do
+ describe 'status for a diff note on the commit', :js do
let(:note) { create(:diff_note_on_commit, project: project) }
it_behaves_like 'showing user status' do
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index dee964005a4..596b4773716 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe 'User browses commits' do
.and have_selector('ul.breadcrumb a', count: 4)
end
- it 'renders diff links to both the previous and current image' do
+ it 'renders diff links to both the previous and current image', :js do
visit project_commit_path(project, sample_image_commit.id)
links = page.all('.file-actions a')
diff --git a/spec/features/projects/environments/environment_metrics_spec.rb b/spec/features/projects/environments/environment_metrics_spec.rb
index c72f88205b5..8315c821b6d 100644
--- a/spec/features/projects/environments/environment_metrics_spec.rb
+++ b/spec/features/projects/environments/environment_metrics_spec.rb
@@ -28,9 +28,8 @@ RSpec.describe 'Environment > Metrics' do
shared_examples 'has environment selector' do
it 'has a working environment selector', :js do
click_link('See metrics')
- # TODO: See metrics on the sidebar still points to the old metrics URL
- # https://gitlab.com/gitlab-org/gitlab/-/issues/229277
- expect(page).to have_current_path(metrics_project_environment_path(project, id: environment.id))
+
+ expect(page).to have_current_path(project_metrics_dashboard_path(project, environment: environment.id))
expect(page).to have_css('[data-qa-selector="environments_dropdown"]')
within('[data-qa-selector="environments_dropdown"]') do
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index b3f671d57a9..a05910cd892 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe 'Environments page', :js do
expect(page).to have_css('.environments-container')
expect(page.all('.environment-name').length).to eq(1)
- expect(page.all('.ic-stop').length).to eq(1)
+ expect(page.all('[data-testid="stop-icon"]').length).to eq(1)
end
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index d84c39de8d8..8d3ca9d9fd1 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -186,7 +186,7 @@ RSpec.describe 'Edit Project Settings' do
click_button "Save changes"
end
- expect(find(".sharing-permissions")).to have_selector(".project-feature-toggle.is-disabled", count: 3)
+ expect(find(".sharing-permissions")).to have_selector(".project-feature-toggle.is-disabled", count: 4)
end
it "shows empty features project homepage" do
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index e5259bd88be..a6126fbcb33 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe "User browses files" do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
end
+
let(:project) { create(:project, :repository, name: "Shop") }
let(:project2) { create(:project, :repository, name: "Another Project", path: "another-project") }
let(:tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index 47c5d667f4f..f2074c78dba 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
end
+
let(:project) { create(:project, :repository) }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
let(:project2_tree_path_root_ref) { project_tree_path(project2, project2.repository.root_ref) }
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index 5abc048c135..39bc139656b 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
end
+
let(:project) { create(:project, :repository, name: 'Shop') }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
diff --git a/spec/features/projects/files/user_deletes_files_spec.rb b/spec/features/projects/files/user_deletes_files_spec.rb
index 4df23b852ff..b6e300e9e59 100644
--- a/spec/features/projects/files/user_deletes_files_spec.rb
+++ b/spec/features/projects/files/user_deletes_files_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'Projects > Files > User deletes files', :js do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
end
+
let(:project) { create(:project, :repository, name: 'Shop') }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
diff --git a/spec/features/projects/files/user_replaces_files_spec.rb b/spec/features/projects/files/user_replaces_files_spec.rb
index b11cf732c95..c9b472260bd 100644
--- a/spec/features/projects/files/user_replaces_files_spec.rb
+++ b/spec/features/projects/files/user_replaces_files_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'Projects > Files > User replaces files', :js do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
end
+
let(:project) { create(:project, :repository, name: 'Shop') }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
let(:project_tree_path_root_ref) { project_tree_path(project, project.repository.root_ref) }
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index f0ed4013230..1e84d1552a1 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Project fork' do
it 'allows user to fork project' do
visit project_path(project)
- expect(page).not_to have_css('a.disabled', text: 'Fork')
+ expect(page).not_to have_css('a.disabled', text: 'Select')
end
it 'disables fork button when user has exceeded project limit' do
@@ -40,7 +40,7 @@ RSpec.describe 'Project fork' do
visit project_path(project)
expect(page).to have_css('a', text: 'Fork')
- expect(page).not_to have_css('a.disabled', text: 'Fork')
+ expect(page).not_to have_css('a.disabled', text: 'Select')
end
it 'renders new project fork page' do
@@ -116,7 +116,7 @@ RSpec.describe 'Project fork' do
click_link 'Fork'
page.within '.fork-thumbnail-container' do
- click_link user.name
+ click_link 'Select'
end
expect(page).to have_content 'Forked from'
@@ -156,7 +156,7 @@ RSpec.describe 'Project fork' do
click_link 'Fork'
page.within '.fork-thumbnail-container' do
- click_link user.name
+ click_link 'Select'
end
visit project_forks_path(project)
@@ -193,7 +193,7 @@ RSpec.describe 'Project fork' do
click_link 'Fork'
page.within '.fork-thumbnail-container' do
- click_link user.name
+ click_link 'Select'
end
visit project_forks_path(project)
@@ -218,7 +218,7 @@ RSpec.describe 'Project fork' do
click_link 'Fork'
page.within '.fork-thumbnail-container' do
- click_link user.name
+ click_link 'Select'
end
expect(page).to have_content "Name has already been taken"
@@ -232,39 +232,43 @@ RSpec.describe 'Project fork' do
group.add_maintainer(user)
end
- it 'allows user to fork project to group or to user namespace' do
+ it 'allows user to fork project to group or to user namespace', :js do
visit project_path(project)
+ wait_for_requests
expect(page).not_to have_css('a.disabled', text: 'Fork')
click_link 'Fork'
- expect(page).to have_css('.fork-thumbnail', count: 2)
+ expect(page).to have_css('.fork-thumbnail')
+ expect(page).to have_css('.group-row')
expect(page).not_to have_css('.fork-thumbnail.disabled')
end
- it 'allows user to fork project to group and not user when exceeded project limit' do
+ it 'allows user to fork project to group and not user when exceeded project limit', :js do
user.projects_limit = 0
user.save!
visit project_path(project)
+ wait_for_requests
expect(page).not_to have_css('a.disabled', text: 'Fork')
click_link 'Fork'
- expect(page).to have_css('.fork-thumbnail', count: 2)
expect(page).to have_css('.fork-thumbnail.disabled')
+ expect(page).to have_css('.group-row')
end
- it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline do
+ it 'links to the fork if the project was already forked within that namespace', :sidekiq_might_not_need_inline, :js do
forked_project = fork_project(project, user, namespace: group, repository: true)
visit new_project_fork_path(project)
+ wait_for_requests
- expect(page).to have_css('div.forked', text: group.full_name)
+ expect(page).to have_css('.group-row a.btn', text: 'Go to fork')
- click_link group.full_name
+ click_link 'Go to fork'
expect(current_path).to eq(project_path(forked_project))
end
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 86aeb2bc80c..7f8ded4fa43 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe 'Import/Export - project export integration test', :js do
key: [Project, Ci::Variable, :yaml_variables]
}
end
+
let(:safe_hashes) { { yaml_variables: %w[key value public] } }
let(:project) { setup_project }
diff --git a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
index 8070fee5804..29a27992a0d 100644
--- a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
+++ b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'User uploads new design', :js do
include DesignManagementTestHelpers
- let_it_be(:project) { create(:project_empty_repo, :public) }
- let_it_be(:user) { project.owner }
- let_it_be(:issue) { create(:issue, project: project) }
+ let(:project) { create(:project_empty_repo, :public) }
+ let(:user) { project.owner }
+ let(:issue) { create(:issue, project: project) }
before do
sign_in(user)
@@ -28,7 +28,7 @@ RSpec.describe 'User uploads new design', :js do
let(:feature_enabled) { true }
it 'uploads designs' do
- attach_file(:design_file, logo_fixture, make_visible: true)
+ upload_design(logo_fixture, count: 1)
expect(page).to have_selector('.js-design-list-item', count: 1)
@@ -36,9 +36,12 @@ RSpec.describe 'User uploads new design', :js do
expect(page).to have_content('dk.png')
end
- attach_file(:design_file, gif_fixture, make_visible: true)
+ upload_design(gif_fixture, count: 2)
+ # Known bug in the legacy implementation: new designs are inserted
+ # at the beginning on the frontend.
expect(page).to have_selector('.js-design-list-item', count: 2)
+ expect(page.all('.js-design-list-item').map(&:text)).to eq(['banana_sample.gif', 'dk.png'])
end
end
@@ -61,8 +64,8 @@ RSpec.describe 'User uploads new design', :js do
context "when the feature is available" do
let(:feature_enabled) { true }
- it 'uploads designs', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/225616' do
- attach_file(:design_file, logo_fixture, make_visible: true)
+ it 'uploads designs' do
+ upload_design(logo_fixture, count: 1)
expect(page).to have_selector('.js-design-list-item', count: 1)
@@ -70,9 +73,10 @@ RSpec.describe 'User uploads new design', :js do
expect(page).to have_content('dk.png')
end
- attach_file(:design_file, gif_fixture, make_visible: true)
+ upload_design(gif_fixture, count: 2)
expect(page).to have_selector('.js-design-list-item', count: 2)
+ expect(page.all('.js-design-list-item').map(&:text)).to eq(['dk.png', 'banana_sample.gif'])
end
end
@@ -92,4 +96,12 @@ RSpec.describe 'User uploads new design', :js do
def gif_fixture
Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
end
+
+ def upload_design(fixture, count:)
+ attach_file(:design_file, fixture, match: :first, make_visible: true)
+
+ wait_for('designs uploaded') do
+ issue.reload.designs.count == count
+ end
+ end
end
diff --git a/spec/features/projects/issues/viewing_issues_with_external_authorization_enabled_spec.rb b/spec/features/projects/issues/viewing_issues_with_external_authorization_enabled_spec.rb
index 6feefff9207..b423543dc33 100644
--- a/spec/features/projects/issues/viewing_issues_with_external_authorization_enabled_spec.rb
+++ b/spec/features/projects/issues/viewing_issues_with_external_authorization_enabled_spec.rb
@@ -11,33 +11,39 @@ RSpec.describe 'viewing an issue with cross project references' do
create(:project, :public,
external_authorization_classification_label: 'other_label')
end
+
let(:other_issue) do
create(:issue, :closed,
title: 'I am in another project',
project: other_project)
end
+
let(:other_confidential_issue) do
create(:issue, :confidential, :closed,
title: 'I am in another project and confidential',
project: other_project)
end
+
let(:other_merge_request) do
create(:merge_request, :closed,
title: 'I am a merge request in another project',
source_project: other_project)
end
+
let(:description_referencing_other_issue) do
"Referencing: #{other_issue.to_reference(project)}, "\
"a confidential issue #{confidential_issue.to_reference}, "\
"a cross project confidential issue #{other_confidential_issue.to_reference(project)}, and "\
"a cross project merge request #{other_merge_request.to_reference(project)}"
end
+
let(:project) { create(:project) }
let(:issue) do
create(:issue,
project: project,
description: description_referencing_other_issue )
end
+
let(:confidential_issue) do
create(:issue, :confidential, :closed,
title: "I am in the same project and confidential",
diff --git a/spec/features/projects/jobs/user_browses_job_spec.rb b/spec/features/projects/jobs/user_browses_job_spec.rb
index 67299e852b3..b935b99642b 100644
--- a/spec/features/projects/jobs/user_browses_job_spec.rb
+++ b/spec/features/projects/jobs/user_browses_job_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'User browses a job', :js do
# scroll to the top of the page first
execute_script "window.scrollTo(0,0)"
- accept_confirm { find('.js-erase-link').click }
+ accept_confirm { find('[data-testid="job-log-erase-link"]').click }
expect(page).to have_no_css('.artifacts')
expect(build).not_to have_trace
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index fc005dd4718..0a6f204454e 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -282,7 +282,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
href = new_project_issue_path(project, options)
page.within('.build-sidebar') do
- expect(find('.js-new-issue')['href']).to include(href)
+ expect(find('[data-testid="job-new-issue"]')['href']).to include(href)
end
end
end
@@ -425,7 +425,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it do
wait_for_all_requests
- expect(page).to have_css('.js-raw-link-controller')
+ expect(page).to have_css('[data-testid="job-raw-link-controller"]')
end
end
@@ -875,7 +875,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
visit project_job_path(project, job)
wait_for_requests
- page.within('.js-job-erased-block') do
+ page.within('[data-testid="job-erased-block"]') do
expect(page).to have_content('Job has been erased')
end
end
@@ -888,7 +888,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
visit project_job_path(project, job)
wait_for_requests
- expect(page).not_to have_css('.js-job-erased-block')
+ expect(page).not_to have_css('[data-testid="job-erased-block"]')
end
end
@@ -901,8 +901,8 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
visit project_job_path(project, job)
wait_for_requests
- expect(page).to have_css('.js-job-sidebar.right-sidebar-collapsed', visible: false)
- expect(page).not_to have_css('.js-job-sidebar.right-sidebar-expanded', visible: false)
+ expect(page).to have_css('[data-testid="job-sidebar"].right-sidebar-collapsed', visible: false)
+ expect(page).not_to have_css('[data-testid="job-sidebar"].right-sidebar-expanded', visible: false)
end
end
@@ -913,8 +913,8 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
visit project_job_path(project, job)
wait_for_requests
- expect(page).to have_css('.js-job-sidebar.right-sidebar-expanded')
- expect(page).not_to have_css('.js-job-sidebar.right-sidebar-collapsed')
+ expect(page).to have_css('[data-testid="job-sidebar"].right-sidebar-expanded')
+ expect(page).not_to have_css('[data-testid="job-sidebar"].right-sidebar-collapsed')
end
end
@@ -929,7 +929,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner) }
it 'renders message about job being stuck because no runners are active' do
- expect(page).to have_css('.js-stuck-no-active-runner')
+ expect(page).to have_selector('[data-testid="job-stuck-no-active-runners"]')
expect(page).to have_content("This job is stuck because you don't have any active runners that can run this job.")
end
end
@@ -939,7 +939,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner, tag_list: %w(docker linux)) }
it 'renders message about job being stuck because of no runners with the specified tags' do
- expect(page).to have_css('.js-stuck-with-tags')
+ expect(page).to have_selector('[data-testid="job-stuck-with-tags"]')
expect(page).to have_content("This job is stuck because you don't have any active runners online or available with any of these tags assigned to them:")
end
end
@@ -949,7 +949,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner, tag_list: %w(docker linux)) }
it 'renders message about job being stuck because of no runners with the specified tags' do
- expect(page).to have_css('.js-stuck-with-tags')
+ expect(page).to have_selector('[data-testid="job-stuck-with-tags"]')
expect(page).to have_content("This job is stuck because you don't have any active runners online or available with any of these tags assigned to them:")
end
end
@@ -957,8 +957,8 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
context 'without any runners available' do
let(:job) { create(:ci_build, :pending, pipeline: pipeline) }
- it 'renders message about job being stuck because not runners are available' do
- expect(page).to have_css('.js-stuck-no-active-runner')
+ it 'renders message about job being stuck because no runners are available' do
+ expect(page).to have_selector('[data-testid="job-stuck-no-active-runners"]')
expect(page).to have_content("This job is stuck because you don't have any active runners that can run this job.")
end
end
@@ -968,7 +968,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner) }
it 'renders message about job being stuck because runners are offline' do
- expect(page).to have_css('.js-stuck-no-runners')
+ expect(page).to have_selector('[data-testid="job-stuck-no-runners"]')
expect(page).to have_content("This job is stuck because the project doesn't have any runners online assigned to it.")
end
end
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 56b807e08d7..b32ccb0ccef 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe 'Project members list' do
visit_members_page
# Open modal
- find(:css, 'li.project_member', text: other_user.name).find(:css, 'button.btn-remove').click
+ find(:css, 'li.project_member', text: other_user.name).find(:css, 'button.btn-danger').click
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
diff --git a/spec/features/projects/members/master_manages_access_requests_spec.rb b/spec/features/projects/members/master_manages_access_requests_spec.rb
index 4c3eaa93352..2fdc75dca91 100644
--- a/spec/features/projects/members/master_manages_access_requests_spec.rb
+++ b/spec/features/projects/members/master_manages_access_requests_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe 'Projects > Members > Maintainer manages access requests' do
it_behaves_like 'Maintainer manages access requests' do
+ let(:has_tabs) { false }
let(:entity) { create(:project, :public) }
let(:members_page_path) { project_project_members_path(entity) }
end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 22cd832ff06..dcb901bcf11 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe 'Project navbar' do
let_it_be(:project) { create(:project, :repository) }
before do
+ stub_feature_flags(project_iterations: false)
+
+ insert_package_nav(_('Operations'))
+
project.add_maintainer(user)
sign_in(user)
end
@@ -58,13 +62,8 @@ RSpec.describe 'Project navbar' do
before do
stub_config(registry: { enabled: true })
- insert_after_nav_item(
- _('Operations'),
- new_nav_item: {
- nav_item: _('Packages & Registries'),
- nav_sub_items: [_('Container Registry')]
- }
- )
+ insert_container_nav(_('Operations'))
+
visit project_path(project)
end
diff --git a/spec/features/projects/package_files_spec.rb b/spec/features/projects/package_files_spec.rb
new file mode 100644
index 00000000000..bea9a9929b9
--- /dev/null
+++ b/spec/features/projects/package_files_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'PackageFiles' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let!(:package) { create(:maven_package, project: project) }
+ let!(:package_file) { package.package_files.first }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'user with maintainer role' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'allows direct download by url' do
+ visit download_project_package_file_path(project, package_file)
+
+ expect(status_code).to eq(200)
+ end
+
+ it 'renders the download link with the correct url', :js do
+ visit project_package_path(project, package)
+
+ download_url = download_project_package_file_path(project, package_file)
+
+ expect(page).to have_link(package_file.file_name, href: download_url)
+ end
+
+ it 'does not allow download of a package belonging to a different project' do
+ another_package = create(:maven_package)
+ another_file = another_package.package_files.first
+
+ visit download_project_package_file_path(project, another_file)
+
+ expect(status_code).to eq(404)
+ end
+ end
+
+ it 'does not allow direct download when no access to the project' do
+ visit download_project_package_file_path(project, package_file)
+
+ expect(status_code).to eq(404)
+ end
+
+ it 'gives 404 when no package file exists' do
+ visit download_project_package_file_path(project, non_existing_record_id)
+
+ expect(status_code).to eq(404)
+ end
+end
diff --git a/spec/features/projects/packages_spec.rb b/spec/features/projects/packages_spec.rb
new file mode 100644
index 00000000000..e5c684bdff5
--- /dev/null
+++ b/spec/features/projects/packages_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Packages' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ context 'when feature is not available' do
+ context 'packages feature is disabled by config' do
+ before do
+ allow(Gitlab.config.packages).to receive(:enabled).and_return(false)
+ end
+
+ it 'gives 404' do
+ visit_project_packages
+
+ expect(status_code).to eq(404)
+ end
+ end
+ end
+
+ context 'when feature is available', :js do
+ before do
+ visit_project_packages
+ end
+
+ context 'when there are packages' do
+ let_it_be(:conan_package) { create(:conan_package, project: project, name: 'zzz', created_at: 1.day.ago, version: '1.0.0') }
+ let_it_be(:maven_package) { create(:maven_package, project: project, name: 'aaa', created_at: 2.days.ago, version: '2.0.0') }
+ let_it_be(:packages) { [conan_package, maven_package] }
+
+ it_behaves_like 'packages list'
+
+ it_behaves_like 'package details link'
+
+ context 'deleting a package' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:package) { create(:package, project: project) }
+
+ it 'allows you to delete a package' do
+ first('[title="Remove package"]').click
+ click_button('Delete package')
+
+ expect(page).to have_content 'Package deleted successfully'
+ expect(page).not_to have_content(package.name)
+ end
+ end
+
+ it_behaves_like 'shared package sorting' do
+ let_it_be(:package_one) { maven_package }
+ let_it_be(:package_two) { conan_package }
+ end
+ end
+
+ it_behaves_like 'when there are no packages'
+ end
+
+ def visit_project_packages
+ visit project_packages_path(project)
+ end
+end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 2ca584ab8f6..26c46190e7d 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -137,6 +137,7 @@ RSpec.describe 'Pipeline', :js do
source_project: project,
source_branch: pipeline.ref)
end
+
let!(:merge_request2) do
create(:merge_request,
source_project: project,
@@ -363,66 +364,29 @@ RSpec.describe 'Pipeline', :js do
describe 'test tabs' do
let(:pipeline) { create(:ci_pipeline, :with_test_reports, :with_report_results, project: project) }
- context 'with build_report_summary feature flag disabled' do
- before do
- stub_feature_flags(build_report_summary: false)
- visit_pipeline
- wait_for_requests
- end
-
- context 'with test reports' do
- it 'shows badge counter in Tests tab' do
- expect(pipeline.test_reports.total_count).to eq(4)
- expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_reports.total_count.to_s)
- end
-
- it 'does not call test_report.json endpoint by default', :js do
- expect(page).to have_selector('.js-no-tests-to-show', visible: :all)
- end
-
- it 'does call test_report.json endpoint when tab is selected', :js do
- find('.js-tests-tab-link').click
- wait_for_requests
-
- expect(page).to have_content('Jobs')
- expect(page).to have_selector('.js-tests-detail', visible: :all)
- end
- end
-
- context 'without test reports' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- it 'shows zero' do
- expect(page.find('.js-test-report-badge-counter', visible: :all).text).to eq("0")
- end
- end
+ before do
+ visit_pipeline
+ wait_for_requests
end
- context 'with build_report_summary feature flag enabled' do
- before do
- visit_pipeline
- wait_for_requests
+ context 'with test reports' do
+ it 'shows badge counter in Tests tab' do
+ expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_report_summary.total[:count].to_s)
end
- context 'with test reports' do
- it 'shows badge counter in Tests tab' do
- expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_report_summary.total_count.to_s)
- end
-
- it 'calls summary.json endpoint', :js do
- find('.js-tests-tab-link').click
+ it 'calls summary.json endpoint', :js do
+ find('.js-tests-tab-link').click
- expect(page).to have_content('Jobs')
- expect(page).to have_selector('.js-tests-detail', visible: :all)
- end
+ expect(page).to have_content('Jobs')
+ expect(page).to have_selector('.js-tests-detail', visible: :all)
end
+ end
- context 'without test reports' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
+ context 'without test reports' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
- it 'shows zero' do
- expect(page.find('.js-test-report-badge-counter', visible: :all).text).to eq("0")
- end
+ it 'shows zero' do
+ expect(page.find('.js-test-report-badge-counter', visible: :all).text).to eq("0")
end
end
end
@@ -436,7 +400,7 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
- find('.js-retry-button').click
+ find('[data-testid="retryButton"]').click
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
@@ -938,7 +902,7 @@ RSpec.describe 'Pipeline', :js do
context 'when retrying' do
before do
- find('.js-retry-button').click
+ find('[data-testid="retryButton"]').click
end
it 'does not show a "Retry" button', :sidekiq_might_not_need_inline do
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 0eb92f3e679..8747b3ab54c 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -652,6 +652,7 @@ RSpec.describe 'Pipelines', :js do
let(:project) { create(:project, :repository) }
before do
+ stub_feature_flags(new_pipeline_form: false)
visit new_project_pipeline_path(project)
end
@@ -718,6 +719,7 @@ RSpec.describe 'Pipelines', :js do
let(:project) { create(:project, :repository) }
before do
+ stub_feature_flags(new_pipeline_form: false)
visit new_project_pipeline_path(project)
end
diff --git a/spec/features/projects/product_analytics/events_spec.rb b/spec/features/projects/product_analytics/events_spec.rb
new file mode 100644
index 00000000000..12f1c4d291a
--- /dev/null
+++ b/spec/features/projects/product_analytics/events_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Product Analytics > Events' do
+ let_it_be(:project) { create(:project_empty_repo) }
+ let_it_be(:user) { create(:user) }
+ let(:event) { create(:product_analytics_event, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'shows no events message' do
+ visit(project_product_analytics_path(project))
+
+ expect(page).to have_content('There are currently no events')
+ end
+
+ it 'shows events' do
+ event
+
+ visit(project_product_analytics_path(project))
+
+ expect(page).to have_content('dvce_created_tstamp')
+ expect(page).to have_content(event.event_id)
+ end
+end
diff --git a/spec/features/projects/product_analytics/graphs_spec.rb b/spec/features/projects/product_analytics/graphs_spec.rb
new file mode 100644
index 00000000000..e2293893589
--- /dev/null
+++ b/spec/features/projects/product_analytics/graphs_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Product Analytics > Graphs' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'shows graphs', :js do
+ create(:product_analytics_event, project: project)
+
+ visit(graphs_project_product_analytics_path(project))
+
+ expect(page).to have_content('Showing graphs based on events')
+ expect(page).to have_content('platform')
+ expect(page).to have_content('os_timezone')
+ expect(page).to have_content('br_lang')
+ expect(page).to have_content('doc_charset')
+ end
+end
diff --git a/spec/features/projects/product_analytics/setup_spec.rb b/spec/features/projects/product_analytics/setup_spec.rb
new file mode 100644
index 00000000000..45c2b67502c
--- /dev/null
+++ b/spec/features/projects/product_analytics/setup_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Product Analytics > Setup' do
+ let_it_be(:project) { create(:project_empty_repo) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'shows the setup instructions' do
+ visit(setup_project_product_analytics_path(project))
+
+ expect(page).to have_content('Copy the code below to implement tracking in your application')
+ end
+end
diff --git a/spec/features/projects/product_analytics/test_spec.rb b/spec/features/projects/product_analytics/test_spec.rb
new file mode 100644
index 00000000000..8984fb409d1
--- /dev/null
+++ b/spec/features/projects/product_analytics/test_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Product Analytics > Test' do
+ let_it_be(:project) { create(:project_empty_repo) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_maintainer(user)
+ sign_in(user)
+ end
+
+ it 'says it sends a payload' do
+ visit(test_project_product_analytics_path(project))
+
+ expect(page).to have_content('This page sends a payload.')
+ end
+
+ it 'shows the last event if there is one' do
+ event = create(:product_analytics_event, project: project)
+
+ visit(test_project_product_analytics_path(project))
+
+ expect(page).to have_content(event.event_id)
+ end
+end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index 878794bd897..de7251db5c9 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
end
it 'renders form for incident management' do
- expect(page).to have_selector('h3', text: 'Incidents')
+ expect(page).to have_selector('h4', text: 'Incidents')
end
it 'sets correct default values' do
@@ -46,11 +46,14 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
it 'updates form values' do
check(create_issue)
uncheck(send_email)
+ click_on('No template selected')
+ click_on('bug')
save_form
click_expand_incident_management_button
expect(find_field(create_issue)).to be_checked
+ expect(page).to have_selector(:id, 'alert-integration-settings-issue-template', text: 'bug')
expect(find_field(send_email)).not_to be_checked
end
diff --git a/spec/features/projects/settings/packages_settings_spec.rb b/spec/features/projects/settings/packages_settings_spec.rb
new file mode 100644
index 00000000000..0b40cbee582
--- /dev/null
+++ b/spec/features/projects/settings/packages_settings_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects > Settings > Packages', :js do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ context 'Packages enabled in config' do
+ before do
+ allow(Gitlab.config.packages).to receive(:enabled).and_return(true)
+ end
+
+ it 'displays the packages toggle button' do
+ visit edit_project_path(project)
+
+ expect(page).to have_content('Packages')
+ expect(page).to have_selector('input[name="project[packages_enabled]"] + button', visible: true)
+ end
+ end
+
+ context 'Packages disabled in config' do
+ before do
+ allow(Gitlab.config.packages).to receive(:enabled).and_return(false)
+ end
+
+ it 'does not show up in the UI' do
+ visit edit_project_path(project)
+
+ expect(page).not_to have_content('Packages')
+ end
+ end
+end
diff --git a/spec/features/projects/show/user_manages_notifications_spec.rb b/spec/features/projects/show/user_manages_notifications_spec.rb
index 58a2c793b7b..9d9a75c22be 100644
--- a/spec/features/projects/show/user_manages_notifications_spec.rb
+++ b/spec/features/projects/show/user_manages_notifications_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
click_notifications_button
expect(find('.update-notification.is-active')).to have_content('On mention')
- expect(find('.notifications-icon use')[:'xlink:href']).to end_with('#notifications')
+ expect(page).to have_css('.notifications-icon[data-testid="notifications-icon"]')
end
it 'changes the notification setting to disabled' do
@@ -32,7 +32,7 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
wait_for_requests
- expect(find('.notifications-icon use')[:'xlink:href']).to end_with('#notifications-off')
+ expect(page).to have_css('.notifications-icon[data-testid="notifications-off-icon"]')
end
context 'custom notification settings' do
@@ -52,7 +52,8 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
:merge_merge_request,
:failed_pipeline,
:fixed_pipeline,
- :success_pipeline
+ :success_pipeline,
+ :moved_project
]
end
@@ -67,20 +68,6 @@ RSpec.describe 'Projects > Show > User manages notifications', :js do
end
end
end
-
- context 'when ci_pipeline_fixed_notifications is disabled' do
- before do
- stub_feature_flags(ci_pipeline_fixed_notifications: false)
- end
-
- it 'hides fixed_pipeline checkbox' do
- visit project_path(project)
- click_notifications_button
- page.find('a[data-notification-level="custom"]').click
-
- expect(page).not_to have_selector("input[name='notification_setting[fixed_pipeline]']")
- end
- end
end
context 'when project emails are disabled' do
diff --git a/spec/features/projects/snippets/create_snippet_spec.rb b/spec/features/projects/snippets/create_snippet_spec.rb
index 73d033cbdb8..3db870f229a 100644
--- a/spec/features/projects/snippets/create_snippet_spec.rb
+++ b/spec/features/projects/snippets/create_snippet_spec.rb
@@ -2,9 +2,28 @@
require 'spec_helper'
-RSpec.shared_examples_for 'snippet editor' do
+RSpec.describe 'Projects > Snippets > Create Snippet', :js do
+ include DropzoneHelper
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) do
+ create(:project, :public, creator: user).tap do |p|
+ p.add_maintainer(user)
+ end
+ end
+
+ let(:title) { 'My Snippet Title' }
+ let(:file_content) { 'Hello World!' }
+ let(:md_description) { 'My Snippet **Description**' }
+ let(:description) { 'My Snippet Description' }
+
before do
+ stub_feature_flags(snippets_vue: false)
stub_feature_flags(snippets_edit_vue: false)
+
+ sign_in(user)
+
+ visit new_project_snippet_path(project)
end
def description_field
@@ -12,137 +31,81 @@ RSpec.shared_examples_for 'snippet editor' do
end
def fill_form
- fill_in 'project_snippet_title', with: 'My Snippet Title'
+ fill_in 'project_snippet_title', with: title
# Click placeholder first to expand full description field
description_field.click
- fill_in 'project_snippet_description', with: 'My Snippet **Description**'
+ fill_in 'project_snippet_description', with: md_description
page.within('.file-editor') do
el = find('.inputarea')
- el.send_keys 'Hello World!'
+ el.send_keys file_content
end
end
- context 'when a user is authenticated' do
- before do
- stub_feature_flags(snippets_vue: false)
- project.add_maintainer(user)
- sign_in(user)
+ it 'shows collapsible description input' do
+ collapsed = description_field
- visit project_snippets_path(project)
-
- # Wait for the SVG to ensure the button location doesn't shift
- within('.empty-state') { find('img.js-lazy-loaded') }
- click_on('New snippet')
- wait_for_requests
- end
+ expect(page).not_to have_field('project_snippet_description')
+ expect(collapsed).to be_visible
- it 'shows collapsible description input' do
- collapsed = description_field
+ collapsed.click
- expect(page).not_to have_field('project_snippet_description')
- expect(collapsed).to be_visible
+ expect(page).to have_field('project_snippet_description')
+ expect(collapsed).not_to be_visible
+ end
- collapsed.click
+ it 'creates a new snippet' do
+ fill_form
+ click_button('Create snippet')
+ wait_for_requests
- expect(page).to have_field('project_snippet_description')
- expect(collapsed).not_to be_visible
+ expect(page).to have_content(title)
+ expect(page).to have_content(file_content)
+ page.within('.snippet-header .description') do
+ expect(page).to have_content(description)
+ expect(page).to have_selector('strong')
end
+ end
- it 'creates a new snippet' do
- fill_form
- click_button('Create snippet')
- wait_for_requests
+ it 'uploads a file when dragging into textarea' do
+ fill_form
+ dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
- expect(page).to have_content('My Snippet Title')
- expect(page).to have_content('Hello World!')
- page.within('.snippet-header .description') do
- expect(page).to have_content('My Snippet Description')
- expect(page).to have_selector('strong')
- end
- end
+ expect(page.find_field('project_snippet_description').value).to have_content('banana_sample')
- it 'uploads a file when dragging into textarea' do
- fill_form
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
+ click_button('Create snippet')
+ wait_for_requests
- expect(page.find_field("project_snippet_description").value).to have_content('banana_sample')
+ link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
+ expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
+ end
- click_button('Create snippet')
- wait_for_requests
+ it 'displays validation errors' do
+ fill_in 'project_snippet_title', with: title
+ click_button('Create snippet')
+ wait_for_requests
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
- end
+ expect(page).to have_selector('#error_explanation')
+ end
- it 'creates a snippet when all required fields are filled in after validation failing' do
- fill_in 'project_snippet_title', with: 'My Snippet Title'
- click_button('Create snippet')
+ context 'when the git operation fails' do
+ let(:error) { 'Error creating the snippet' }
- expect(page).to have_selector('#error_explanation')
+ before do
+ allow_next_instance_of(Snippets::CreateService) do |instance|
+ allow(instance).to receive(:create_commit).and_raise(StandardError, error)
+ end
fill_form
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
- find("input[value='Create snippet']").send_keys(:return)
+ click_button('Create snippet')
wait_for_requests
-
- expect(page).to have_content('My Snippet Title')
- expect(page).to have_content('Hello World!')
- page.within('.snippet-header .description') do
- expect(page).to have_content('My Snippet Description')
- expect(page).to have_selector('strong')
- end
- link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
- expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
- end
-
- context 'when the git operation fails' do
- let(:error) { 'Error creating the snippet' }
-
- before do
- allow_next_instance_of(Snippets::CreateService) do |instance|
- allow(instance).to receive(:create_commit).and_raise(StandardError, error)
- end
-
- fill_form
-
- click_button('Create snippet')
- wait_for_requests
- end
-
- it 'displays the error' do
- expect(page).to have_content(error)
- end
-
- it 'renders new page' do
- expect(page).to have_content('New Snippet')
- end
end
- end
-
- context 'when a user is not authenticated' do
- before do
- stub_feature_flags(snippets_vue: false)
- end
-
- it 'shows a public snippet on the index page but not the New snippet button' do
- snippet = create(:project_snippet, :public, :repository, project: project)
-
- visit project_snippets_path(project)
- expect(page).to have_content(snippet.title)
- expect(page).not_to have_content('New snippet')
+ it 'renders the new page and displays the error' do
+ expect(page).to have_content(error)
+ expect(page).to have_content('New Snippet')
end
end
end
-
-RSpec.describe 'Projects > Snippets > Create Snippet', :js do
- include DropzoneHelper
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
-
- it_behaves_like "snippet editor"
-end
diff --git a/spec/features/projects/snippets/show_spec.rb b/spec/features/projects/snippets/show_spec.rb
index 0f6429d49f6..8fded3cde80 100644
--- a/spec/features/projects/snippets/show_spec.rb
+++ b/spec/features/projects/snippets/show_spec.rb
@@ -3,157 +3,41 @@
require 'spec_helper'
RSpec.describe 'Projects > Snippets > Project snippet', :js do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:snippet) { create(:project_snippet, project: project, file_name: file_name, content: content) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) do
+ create(:project, creator: user).tap do |p|
+ p.add_maintainer(user)
+ end
+ end
+
+ let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
before do
stub_feature_flags(snippets_vue: false)
- project.add_maintainer(user)
+
sign_in(user)
end
- context 'Ruby file' do
- let(:file_name) { 'popen.rb' }
- let(:content) { project.repository.blob_at('master', 'files/ruby/popen.rb').data }
+ it_behaves_like 'show and render proper snippet blob' do
+ let(:anchor) { nil }
- before do
- visit project_snippet_path(project, snippet)
+ subject do
+ visit project_snippet_path(project, snippet, anchor: anchor)
wait_for_requests
end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows highlighted Ruby code
- expect(page).to have_content("require 'fileutils'")
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
end
- context 'Markdown file' do
- let(:file_name) { 'ruby-style-guide.md' }
- let(:content) { project.repository.blob_at('master', 'files/markdown/ruby-style-guide.md').data }
-
- context 'visiting directly' do
- before do
- visit project_snippet_path(project, snippet)
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows rendered Markdown
- expect(page).to have_link("PEP-8")
-
- # shows a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
-
- # shows a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
-
- context 'switching to the simple viewer' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # shows highlighted Markdown code
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
-
- context 'switching to the rich viewer again' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
- end
- end
-
- context 'visiting with a line number anchor' do
- before do
- visit project_snippet_path(project, snippet, anchor: 'L1')
-
- wait_for_requests
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # highlights the line in question
- expect(page).to have_selector('#LC1.hll')
-
- # shows highlighted Markdown code
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
+ it_behaves_like 'showing user status' do
+ let(:file_path) { 'files/ruby/popen.rb' }
+ let(:user_with_status) { snippet.author }
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
+ subject { visit project_snippet_path(project, snippet) }
end
- it_behaves_like 'showing user status' do
- let(:file_name) { 'ruby-style-guide.md' }
- let(:content) { project.repository.blob_at('master', 'files/markdown/ruby-style-guide.md').data }
-
- let(:user_with_status) { snippet.author }
+ it_behaves_like 'does not show New Snippet button' do
+ let(:file_path) { 'files/ruby/popen.rb' }
- subject do
- visit project_snippet_path(project, snippet)
- wait_for_requests
- end
+ subject { visit project_snippet_path(project, snippet) }
end
end
diff --git a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
index 05d8989d88a..fdab63a56b8 100644
--- a/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_updates_wiki_page_spec.rb
@@ -234,4 +234,30 @@ RSpec.describe 'User updates wiki page' do
it_behaves_like 'wiki file attachments'
end
+
+ context 'when an existing page exceeds the content size limit' do
+ let_it_be(:project) { create(:project, :wiki_repo) }
+ let!(:wiki_page) { create(:wiki_page, wiki: project.wiki, content: "one\ntwo\nthree") }
+
+ before do
+ stub_application_setting(wiki_page_max_content_bytes: 10)
+
+ visit wiki_page_path(wiki_page.wiki, wiki_page, action: :edit)
+ end
+
+ it 'allows changing the title if the content does not change' do
+ fill_in 'Title', with: 'new title'
+ click_on 'Save changes'
+
+ expect(page).to have_content('Wiki was successfully updated.')
+ end
+
+ it 'shows a validation error when trying to change the content' do
+ fill_in 'Content', with: 'new content'
+ click_on 'Save changes'
+
+ expect(page).to have_content('The form contains the following error:')
+ expect(page).to have_content('Content is too long (11 Bytes). The maximum size is 10 Bytes.')
+ end
+ end
end
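# Illustrative sketch only (not GitLab's actual validation code): the kind of
# byte-size check the wiki spec above exercises. The 10-byte limit mirrors the
# stubbed `wiki_page_max_content_bytes` application setting.
class WikiContentSizeCheck
  def initialize(max_bytes)
    @max_bytes = max_bytes
  end

  # Returns an error message when the content exceeds the limit, nil otherwise.
  def error_for(content)
    size = content.bytesize
    return if size <= @max_bytes

    "Content is too long (#{size} Bytes). The maximum size is #{@max_bytes} Bytes."
  end
end

# WikiContentSizeCheck.new(10).error_for('new content')
# # => "Content is too long (11 Bytes). The maximum size is 10 Bytes."
# WikiContentSizeCheck.new(10).error_for('short')
# # => nil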
diff --git a/spec/features/projects/wiki/user_views_wiki_pages_spec.rb b/spec/features/projects/wiki/user_views_wiki_pages_spec.rb
index fea913b8212..4f29ae0cc8a 100644
--- a/spec/features/projects/wiki/user_views_wiki_pages_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_pages_spec.rb
@@ -11,9 +11,11 @@ RSpec.describe 'User views wiki pages' do
let!(:wiki_page1) do
create(:wiki_page, wiki: project.wiki, title: '3 home', content: '3')
end
+
let!(:wiki_page2) do
create(:wiki_page, wiki: project.wiki, title: '1 home', content: '1')
end
+
let!(:wiki_page3) do
create(:wiki_page, wiki: project.wiki, title: '2 home', content: '2')
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index ab0b6725491..3577498c3b4 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -254,13 +254,13 @@ RSpec.describe 'Project' do
end
it 'focuses on the confirmation field' do
- click_button 'Remove project'
+ click_button 'Delete project'
expect(page).to have_selector '#confirm_name_input:focus'
end
- it 'removes a project', :sidekiq_might_not_need_inline do
- expect { remove_with_confirm('Remove project', project.path) }.to change { Project.count }.by(-1)
+ it 'deletes a project', :sidekiq_might_not_need_inline do
+ expect { remove_with_confirm('Delete project', "Delete #{project.full_name}", 'Yes, delete project') }.to change { Project.count }.by(-1)
expect(page).to have_content "Project '#{project.full_name}' is in the process of being deleted."
expect(Project.all.count).to be_zero
expect(project.issues).to be_empty
@@ -386,9 +386,9 @@ RSpec.describe 'Project' do
{ form: '.rspec-merge-request-settings', input: '#project_printing_merge_request_link_enabled' }]
end
- def remove_with_confirm(button_text, confirm_with)
+ def remove_with_confirm(button_text, confirm_with, confirm_button_text = 'Confirm')
click_button button_text
fill_in 'confirm_name_input', with: confirm_with
- click_button 'Confirm'
+ click_button confirm_button_text
end
end
diff --git a/spec/features/registrations/experience_level_spec.rb b/spec/features/registrations/experience_level_spec.rb
new file mode 100644
index 00000000000..06d380926cd
--- /dev/null
+++ b/spec/features/registrations/experience_level_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Experience level screen' do
+ let_it_be(:user) { create(:user, :unconfirmed) }
+ let_it_be(:group) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ gitlab_sign_in(user)
+ stub_experiment_for_user(onboarding_issues: true)
+ visit users_sign_up_experience_level_path(namespace_path: group.to_param)
+ end
+
+ subject { page }
+
+ it 'shows the intro content' do
+ is_expected.to have_content('Hello there')
+ is_expected.to have_content('Welcome to the guided GitLab tour')
+ is_expected.to have_content('What describes you best?')
+ end
+
+ it 'shows the option for novice' do
+ is_expected.to have_content('Novice')
+ is_expected.to have_content('I’m not very familiar with the basics of project management and DevOps')
+ is_expected.to have_content('Show me everything')
+ end
+
+ it 'shows the option for experienced' do
+ is_expected.to have_content('Experienced')
+ is_expected.to have_content('I’m familiar with the basics of project management and DevOps')
+ is_expected.to have_content('Show me more advanced stuff')
+ end
+
+ it 'does not display any flash messages' do
+ is_expected.not_to have_selector('.flash-container')
+ is_expected.not_to have_content("Please check your email (#{user.email}) to verify that you own this address and unlock the power of CI/CD")
+ end
+
+ it 'does not include the footer links' do
+ is_expected.not_to have_link('Help')
+ is_expected.not_to have_link('About GitLab')
+ end
+end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 9b2373bf28b..0dff4c28270 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -450,5 +450,19 @@ RSpec.describe 'Runners' do
expect(all(:link, href: group_runner_path(group, runner)).length).to eq(1)
end
end
+
+ context 'filtered search' do
+ it 'allows user to search by status and type', :js do
+ visit group_settings_ci_cd_path(group)
+
+ find('.filtered-search').click
+
+ page.within('#js-dropdown-hint') do
+ expect(page).to have_content('Status')
+ expect(page).to have_content('Type')
+ expect(page).not_to have_content('Tag')
+ end
+ end
+ end
end
end
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 5567dcb30ec..37e83d1e888 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -104,6 +104,14 @@ RSpec.describe 'User uses header search field', :js do
let(:scope_name) { 'All GitLab' }
end
+ it 'displays search options' do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: 'test')
+ end
+
+ expect(page).to have_selector(scoped_search_link('test'))
+ end
+
context 'when searching through the search field' do
before do
create(:issue, project: project, title: 'project issue')
@@ -122,9 +130,41 @@ RSpec.describe 'User uses header search field', :js do
end
context 'when user is in a project scope' do
- include_examples 'search field examples' do
- let(:url) { project_path(project) }
- let(:scope_name) { project.name }
+ context 'and it belongs to a group' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, namespace: group) }
+
+ include_examples 'search field examples' do
+ let(:url) { project_path(project) }
+ let(:scope_name) { project.name }
+ end
+
+ it 'displays search options' do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: 'test')
+ end
+
+ expect(page).to have_selector(scoped_search_link('test'))
+ expect(page).to have_selector(scoped_search_link('test', group_id: group.id))
+ expect(page).to have_selector(scoped_search_link('test', project_id: project.id, group_id: group.id))
+ end
+ end
+
+ context 'and it belongs to a user' do
+ include_examples 'search field examples' do
+ let(:url) { project_path(project) }
+ let(:scope_name) { project.name }
+ end
+
+ it 'displays search options' do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: 'test')
+ end
+
+ expect(page).to have_selector(scoped_search_link('test'))
+ expect(page).not_to have_selector(scoped_search_link('test', group_id: project.namespace_id))
+ expect(page).to have_selector(scoped_search_link('test', project_id: project.id))
+ end
end
end
@@ -140,6 +180,16 @@ RSpec.describe 'User uses header search field', :js do
let(:url) { group_path(group) }
let(:scope_name) { group.name }
end
+
+ it 'displays search options' do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: 'test')
+ end
+
+ expect(page).to have_selector(scoped_search_link('test'))
+ expect(page).to have_selector(scoped_search_link('test', group_id: group.id))
+ expect(page).not_to have_selector(scoped_search_link('test', project_id: project.id))
+ end
end
context 'when user is in a subgroup scope' do
@@ -156,5 +206,25 @@ RSpec.describe 'User uses header search field', :js do
let(:url) { group_path(subgroup) }
let(:scope_name) { subgroup.name }
end
+
+ it 'displays search options' do
+ page.within('.search-input-wrap') do
+ fill_in('search', with: 'test')
+ end
+
+ expect(page).to have_selector(scoped_search_link('test'))
+ expect(page).to have_selector(scoped_search_link('test', group_id: subgroup.id))
+ expect(page).not_to have_selector(scoped_search_link('test', project_id: project.id))
+ end
+ end
+
+ def scoped_search_link(term, project_id: nil, group_id: nil)
+ # search_path will accept group_id and project_id but the order does not match
+ # what is expected in the href, so the variable must be built manually
+ href = search_path(search: term)
+ href.concat("&project_id=#{project_id}") if project_id
+ href.concat("&group_id=#{group_id}") if group_id
+
+ ".dropdown a[href='#{href}']"
end
end
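# Standalone sketch of the href-building done by scoped_search_link above, with
# search_path stubbed out (assumption: it renders "/search?search=<term>").
# Shown only to make the expected selector strings concrete.
def scoped_search_link_sketch(term, project_id: nil, group_id: nil)
  href = "/search?search=#{term}" # stand-in for search_path(search: term)
  href += "&project_id=#{project_id}" if project_id
  href += "&group_id=#{group_id}" if group_id

  ".dropdown a[href='#{href}']"
end

# scoped_search_link_sketch('test', project_id: 2, group_id: 1)
# # => ".dropdown a[href='/search?search=test&project_id=2&group_id=1']"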
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 99f30d2f904..051bd601c1d 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -479,8 +479,8 @@ RSpec.describe "Internal Project Access" do
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
end
@@ -495,8 +495,8 @@ RSpec.describe "Internal Project Access" do
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
end
@@ -511,8 +511,8 @@ RSpec.describe "Internal Project Access" do
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
it { is_expected.to be_denied_for(:external) }
it { is_expected.to be_denied_for(:visitor) }
end
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index ea00a59dee4..75993959f6e 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -293,10 +293,10 @@ RSpec.describe "Public Project Access" do
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
- it { is_expected.to be_denied_for(:external) }
- it { is_expected.to be_denied_for(:visitor) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
+ it { is_expected.to be_allowed_for(:external) }
+ it { is_expected.to be_allowed_for(:visitor) }
end
describe "GET /:project_path/-/environments/:id" do
@@ -309,10 +309,10 @@ RSpec.describe "Public Project Access" do
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
- it { is_expected.to be_denied_for(:external) }
- it { is_expected.to be_denied_for(:visitor) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
+ it { is_expected.to be_allowed_for(:external) }
+ it { is_expected.to be_allowed_for(:visitor) }
end
describe "GET /:project_path/-/environments/:id/deployments" do
@@ -325,10 +325,10 @@ RSpec.describe "Public Project Access" do
it { is_expected.to be_allowed_for(:maintainer).of(project) }
it { is_expected.to be_allowed_for(:developer).of(project) }
it { is_expected.to be_allowed_for(:reporter).of(project) }
- it { is_expected.to be_denied_for(:guest).of(project) }
- it { is_expected.to be_denied_for(:user) }
- it { is_expected.to be_denied_for(:external) }
- it { is_expected.to be_denied_for(:visitor) }
+ it { is_expected.to be_allowed_for(:guest).of(project) }
+ it { is_expected.to be_allowed_for(:user) }
+ it { is_expected.to be_allowed_for(:external) }
+ it { is_expected.to be_allowed_for(:visitor) }
end
describe "GET /:project_path/-/environments/new" do
diff --git a/spec/features/security/project/snippet/internal_access_spec.rb b/spec/features/security/project/snippet/internal_access_spec.rb
index 52ae1022a4e..0667a2fd48a 100644
--- a/spec/features/security/project/snippet/internal_access_spec.rb
+++ b/spec/features/security/project/snippet/internal_access_spec.rb
@@ -5,10 +5,9 @@ require 'spec_helper'
RSpec.describe "Internal Project Snippets Access" do
include AccessMatchers
- let(:project) { create(:project, :internal) }
-
- let(:internal_snippet) { create(:project_snippet, :internal, project: project, author: project.owner) }
- let(:private_snippet) { create(:project_snippet, :private, project: project, author: project.owner) }
+ let_it_be(:project) { create(:project, :internal) }
+ let_it_be(:internal_snippet) { create(:project_snippet, :internal, project: project, author: project.owner) }
+ let_it_be(:private_snippet) { create(:project_snippet, :private, project: project, author: project.owner) }
describe "GET /:project_path/snippets" do
subject { project_snippets_path(project) }
diff --git a/spec/features/snippets/embedded_snippet_spec.rb b/spec/features/snippets/embedded_snippet_spec.rb
index 4f2ab598a6f..b799fb2fc00 100644
--- a/spec/features/snippets/embedded_snippet_spec.rb
+++ b/spec/features/snippets/embedded_snippet_spec.rb
@@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe 'Embedded Snippets' do
- let(:snippet) { create(:personal_snippet, :public, file_name: 'random_dir.rb', content: content) }
- let(:content) { "require 'fileutils'\nFileUtils.mkdir_p 'some/random_dir'\n" }
+ let_it_be(:snippet) { create(:personal_snippet, :public, :repository) }
+ let(:blobs) { snippet.blobs.first(3) }
it 'loads snippet', :js do
- script_url = "http://#{Capybara.current_session.server.host}:#{Capybara.current_session.server.port}/#{snippet_path(snippet, format: 'js')}"
+ expect_any_instance_of(Snippet).to receive(:blobs).and_return(blobs)
+
+ script_url = "http://#{Capybara.current_session.server.host}:#{Capybara.current_session.server.port}#{snippet_path(snippet, format: 'js')}"
embed_body = "<html><body><script src=\"#{script_url}\"></script></body></html>"
rack_app = proc do
@@ -19,9 +21,15 @@ RSpec.describe 'Embedded Snippets' do
visit("http://#{server.host}:#{server.port}/embedded_snippet.html")
- expect(page).to have_content("random_dir.rb")
- expect(page).to have_content("require 'fileutils'")
- expect(page).to have_link('Open raw')
- expect(page).to have_link('Download')
+ wait_for_requests
+
+ aggregate_failures do
+ blobs.each do |blob|
+ expect(page).to have_content(blob.path)
+ expect(page.find(".snippet-file-content .blob-content[data-blob-id='#{blob.id}'] code")).to have_content(blob.data.squish)
+ expect(page).to have_link('Open raw', href: /-\/snippets\/#{snippet.id}\/raw\/master\/#{blob.path}/)
+ expect(page).to have_link('Download', href: /-\/snippets\/#{snippet.id}\/raw\/master\/#{blob.path}\?inline=false/)
+ end
+ end
end
end
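# Hedged sketch of the plumbing elided by the hunk above: a throwaway Rack app
# returning embed_body, booted with Capybara::Server so the JS embed loads from
# a real host and port. Names and exact setup are assumptions, not necessarily
# what the spec file contains.
rack_app = proc do
  ['200', { 'Content-Type' => 'text/html' }, [embed_body]]
end

server = Capybara::Server.new(rack_app)
server.boot

visit("http://#{server.host}:#{server.port}/embedded_snippet.html")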
diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb
index 55031183e10..e98bb22d3ea 100644
--- a/spec/features/snippets/notes_on_personal_snippets_spec.rb
+++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb
@@ -120,6 +120,17 @@ RSpec.describe 'Comments on personal snippets', :js do
# but we want to make sure
expect(page).not_to have_selector('.atwho-view')
end
+
+ it_behaves_like 'personal snippet with references' do
+ let(:container) { 'div#notes' }
+
+ subject do
+ fill_in 'note[note]', with: references
+ click_button 'Comment'
+
+ wait_for_requests
+ end
+ end
end
context 'when editing a note' do
diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb
index 9125ed74273..981ed12d540 100644
--- a/spec/features/snippets/show_spec.rb
+++ b/spec/features/snippets/show_spec.rb
@@ -3,180 +3,33 @@
require 'spec_helper'
RSpec.describe 'Snippet', :js do
- let(:project) { create(:project, :repository) }
- let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:snippet) { create(:personal_snippet, :public, :repository, author: user) }
before do
stub_feature_flags(snippets_vue: false)
end
- context 'Ruby file' do
- let(:file_name) { 'popen.rb' }
- let(:content) { project.repository.blob_at('master', 'files/ruby/popen.rb').data }
+ it_behaves_like 'show and render proper snippet blob' do
+ let(:anchor) { nil }
- before do
- visit snippet_path(snippet)
+ subject do
+ visit snippet_path(snippet, anchor: anchor)
wait_for_requests
end
-
- it 'displays the blob' do
- aggregate_failures do
- # shows highlighted Ruby code
- expect(page).to have_content("require 'fileutils'")
-
- # does not show a viewer switcher
- expect(page).not_to have_selector('.js-blob-viewer-switcher')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
- end
-
- context 'Markdown file' do
- let(:file_name) { 'ruby-style-guide.md' }
- let(:content) { project.repository.blob_at('master', 'files/markdown/ruby-style-guide.md').data }
-
- context 'visiting directly' do
- before do
- visit snippet_path(snippet)
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows rendered Markdown
- expect(page).to have_link("PEP-8")
-
- # shows a viewer switcher
- expect(page).to have_selector('.js-blob-viewer-switcher')
-
- # shows a disabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
-
- # shows a raw button
- expect(page).to have_link('Open raw')
-
- # shows a download button
- expect(page).to have_link('Download')
- end
- end
-
- context 'Markdown rendering' do
- let(:snippet) { create(:personal_snippet, :public, file_name: file_name, content: content) }
- let(:file_name) { 'test.md' }
- let(:content) { "1. one\n - sublist\n" }
-
- context 'when rendering default markdown' do
- it 'renders using CommonMark' do
- expect(page).to have_content("sublist")
- expect(page).not_to have_xpath("//ol//li//ul")
- end
- end
- end
-
- context 'switching to the simple viewer' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # shows highlighted Markdown code
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
-
- context 'switching to the rich viewer again' do
- before do
- find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
-
- wait_for_requests
- end
-
- it 'displays the blob using the rich viewer' do
- aggregate_failures do
- # hides the simple viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
- expect(page).to have_selector('.blob-viewer[data-type="rich"]')
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
- end
- end
-
- context 'visiting with a line number anchor' do
- before do
- visit snippet_path(snippet, anchor: 'L1')
-
- wait_for_requests
- end
-
- it 'displays the blob using the simple viewer' do
- aggregate_failures do
- # hides the rich viewer
- expect(page).to have_selector('.blob-viewer[data-type="simple"]')
- expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
-
- # highlights the line in question
- expect(page).to have_selector('#LC1.hll')
-
- # shows highlighted Markdown code
- expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
-
- # shows an enabled copy button
- expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
- end
- end
- end
end
it_behaves_like 'showing user status' do
- let(:file_name) { 'popen.rb' }
- let(:content) { project.repository.blob_at('master', 'files/ruby/popen.rb').data }
+ let(:file_path) { 'files/ruby/popen.rb' }
let(:user_with_status) { snippet.author }
subject { visit snippet_path(snippet) }
end
- context 'when user cannot create snippets' do
- let(:user) { create(:user, :external) }
- let(:snippet) { create(:personal_snippet, :public) }
-
- before do
- sign_in(user)
-
- visit snippet_path(snippet)
+ it_behaves_like 'does not show New Snippet button' do
+ let(:file_path) { 'files/ruby/popen.rb' }
- wait_for_requests
- end
-
- it 'does not show the "New Snippet" button' do
- expect(page).not_to have_link('New snippet')
- end
+ subject { visit snippet_path(snippet) }
end
end
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index b100e035d38..f4c6536d6d3 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -2,7 +2,17 @@
require 'spec_helper'
-RSpec.shared_examples_for 'snippet editor' do
+RSpec.describe 'User creates snippet', :js do
+ include DropzoneHelper
+
+ let_it_be(:user) { create(:user) }
+
+ let(:title) { 'My Snippet Title' }
+ let(:file_content) { 'Hello World!' }
+ let(:md_description) { 'My Snippet **Description**' }
+ let(:description) { 'My Snippet Description' }
+ let(:created_snippet) { Snippet.last }
+
before do
stub_feature_flags(snippets_vue: false)
stub_feature_flags(snippets_edit_vue: false)
@@ -14,15 +24,15 @@ RSpec.shared_examples_for 'snippet editor' do
end
def fill_form
- fill_in 'personal_snippet_title', with: 'My Snippet Title'
+ fill_in 'personal_snippet_title', with: title
# Click placeholder first to expand full description field
description_field.click
- fill_in 'personal_snippet_description', with: 'My Snippet **Description**'
+ fill_in 'personal_snippet_description', with: md_description
page.within('.file-editor') do
el = find('.inputarea')
- el.send_keys 'Hello World!'
+ el.send_keys file_content
end
end
@@ -34,12 +44,12 @@ RSpec.shared_examples_for 'snippet editor' do
click_button('Create snippet')
wait_for_requests
- expect(page).to have_content('My Snippet Title')
+ expect(page).to have_content(title)
page.within('.snippet-header .description') do
- expect(page).to have_content('My Snippet Description')
+ expect(page).to have_content(description)
expect(page).to have_selector('strong')
end
- expect(page).to have_content('Hello World!')
+ expect(page).to have_content(file_content)
end
it 'previews a snippet with file' do
@@ -57,7 +67,7 @@ RSpec.shared_examples_for 'snippet editor' do
link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
expect(link).to match(%r{/uploads/-/system/user/#{user.id}/\h{32}/banana_sample\.gif\z})
- # Adds a cache buster for checking if the image exists as Selenium is now handling the cached regquests
+ # Adds a cache buster for checking if the image exists, as Selenium now serves cached requests
# straight from the memory cache instead of issuing new requests.

reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
expect(reqs.first.status_code).to eq(200)
@@ -99,15 +109,10 @@ RSpec.shared_examples_for 'snippet editor' do
wait_for_requests
end
- it 'displays the error' do
+ it 'renders the new page and displays the error' do
expect(page).to have_content(error)
- end
-
- it 'renders new page' do
expect(page).to have_content('New Snippet')
- end
- it 'has the correct action path' do
action = find('form.snippet-form')['action']
expect(action).to match(%r{/snippets\z})
end
@@ -116,46 +121,10 @@ RSpec.shared_examples_for 'snippet editor' do
it 'validation fails for the first time' do
visit new_snippet_path
- fill_in 'personal_snippet_title', with: 'My Snippet Title'
+ fill_in 'personal_snippet_title', with: title
click_button('Create snippet')
expect(page).to have_selector('#error_explanation')
-
- fill_form
- dropzone_file Rails.root.join('spec', 'fixtures', 'banana_sample.gif')
-
- click_button('Create snippet')
- wait_for_requests
-
- expect(page).to have_content('My Snippet Title')
- page.within('.snippet-header .description') do
- expect(page).to have_content('My Snippet Description')
- expect(page).to have_selector('strong')
- end
- expect(page).to have_content('Hello World!')
- link = find('a.no-attachment-icon img.js-lazy-loaded[alt="banana_sample"]')['src']
- expect(link).to match(%r{/uploads/-/system/personal_snippet/#{Snippet.last.id}/\h{32}/banana_sample\.gif\z})
-
- reqs = inspect_requests { visit("#{link}?ran=#{SecureRandom.base64(20)}") }
- expect(reqs.first.status_code).to eq(200)
- end
-
- it 'Authenticated user creates a snippet with + in filename' do
- visit new_snippet_path
-
- fill_in 'personal_snippet_title', with: 'My Snippet Title'
- page.within('.file-editor') do
- find(:xpath, "//input[@id='personal_snippet_file_name']").set 'snippet+file+name'
- el = find('.inputarea')
- el.send_keys 'Hello World!'
- end
-
- click_button 'Create snippet'
- wait_for_requests
-
- expect(page).to have_content('My Snippet Title')
- expect(page).to have_content('snippet+file+name')
- expect(page).to have_content('Hello World!')
end
context 'when snippets default visibility level is restricted' do
@@ -172,20 +141,20 @@ RSpec.shared_examples_for 'snippet editor' do
click_button('Create snippet')
wait_for_requests
- visit snippets_path
- click_link('Internal')
-
- expect(page).to have_content('My Snippet Title')
- created_snippet = Snippet.last
expect(created_snippet.visibility_level).to eq(Gitlab::VisibilityLevel::INTERNAL)
end
end
-end
-RSpec.describe 'User creates snippet', :js do
- include DropzoneHelper
+ it_behaves_like 'personal snippet with references' do
+ let(:container) { '.snippet-header .description' }
+ let(:md_description) { references }
- let_it_be(:user) { create(:user) }
+ subject do
+ visit new_snippet_path
+ fill_form
+ click_button('Create snippet')
- it_behaves_like "snippet editor"
+ wait_for_requests
+ end
+ end
end
diff --git a/spec/features/snippets/user_edits_snippet_spec.rb b/spec/features/snippets/user_edits_snippet_spec.rb
index 3692b0d1ad8..5773904dedf 100644
--- a/spec/features/snippets/user_edits_snippet_spec.rb
+++ b/spec/features/snippets/user_edits_snippet_spec.rb
@@ -56,8 +56,8 @@ RSpec.describe 'User edits snippet', :js do
click_button 'Save changes'
wait_for_requests
- expect(page).to have_no_xpath("//i[@class='fa fa-lock']")
- expect(page).to have_xpath("//i[@class='fa fa-shield']")
+ expect(page).to have_no_selector('[data-testid="lock-icon"]')
+ expect(page).to have_selector('[data-testid="shield-icon"]')
end
it 'updates the snippet to make it public' do
@@ -66,8 +66,8 @@ RSpec.describe 'User edits snippet', :js do
click_button 'Save changes'
wait_for_requests
- expect(page).to have_no_xpath("//i[@class='fa fa-lock']")
- expect(page).to have_xpath("//i[@class='fa fa-globe']")
+ expect(page).to have_no_selector('[data-testid="lock-icon"]')
+ expect(page).to have_selector('[data-testid="earth-icon"]')
end
context 'when the git operation fails' do
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index af2ecfec498..332be055027 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -509,4 +509,29 @@ RSpec.describe 'With experimental flow' do
expect(page).to have_current_path(new_project_path)
end
end
+
+ context 'when terms_opt_in experimental is enabled' do
+ include TermsHelper
+
+ before do
+ enforce_terms
+ stub_experiment(signup_flow: true, terms_opt_in: true)
+ stub_experiment_for_user(signup_flow: true, terms_opt_in: true)
+ end
+
+ it 'terms are checked by default' do
+ new_user = build_stubbed(:user)
+ visit new_user_registration_path
+
+ fill_in 'new_user_username', with: new_user.username
+ fill_in 'new_user_email', with: new_user.email
+ fill_in 'new_user_first_name', with: new_user.first_name
+ fill_in 'new_user_last_name', with: new_user.last_name
+ fill_in 'new_user_password', with: new_user.password
+
+ click_button 'Register'
+
+ expect(current_path).to eq users_sign_up_welcome_path
+ end
+ end
end
diff --git a/spec/finders/admin/projects_finder_spec.rb b/spec/finders/admin/projects_finder_spec.rb
index 03eb41ddfb6..8522170874f 100644
--- a/spec/finders/admin/projects_finder_spec.rb
+++ b/spec/finders/admin/projects_finder_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Admin::ProjectsFinder do
context 'filter by abandoned' do
before do
- private_project.update(last_activity_at: Time.zone.now - 6.months - 1.minute)
+ private_project.update!(last_activity_at: Time.zone.now - 6.months - 1.minute)
end
let(:params) { { abandoned: true } }
@@ -98,7 +98,7 @@ RSpec.describe Admin::ProjectsFinder do
context 'filter by last_repository_check_failed' do
before do
- private_project.update(last_repository_check_failed: true)
+ private_project.update!(last_repository_check_failed: true)
end
let(:params) { { last_repository_check_failed: true } }
diff --git a/spec/finders/alert_management/alerts_finder_spec.rb b/spec/finders/alert_management/alerts_finder_spec.rb
index 7bf9047704b..926446b31d5 100644
--- a/spec/finders/alert_management/alerts_finder_spec.rb
+++ b/spec/finders/alert_management/alerts_finder_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe AlertManagement::AlertsFinder, '#execute' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:alert_1) { create(:alert_management_alert, :all_fields, :resolved, project: project, ended_at: 1.year.ago, events: 2, severity: :high) }
- let_it_be(:alert_2) { create(:alert_management_alert, :all_fields, :ignored, project: project, events: 1, severity: :critical) }
- let_it_be(:alert_3) { create(:alert_management_alert, :all_fields) }
+ let_it_be(:resolved_alert) { create(:alert_management_alert, :all_fields, :resolved, project: project, ended_at: 1.year.ago, events: 2, severity: :high) }
+ let_it_be(:ignored_alert) { create(:alert_management_alert, :all_fields, :ignored, project: project, events: 1, severity: :critical) }
+ let_it_be(:triggered_alert) { create(:alert_management_alert, :all_fields) }
let(:params) { {} }
describe '#execute' do
@@ -23,13 +23,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
end
context 'empty params' do
- it { is_expected.to contain_exactly(alert_1, alert_2) }
+ it { is_expected.to contain_exactly(resolved_alert, ignored_alert) }
end
context 'iid given' do
- let(:params) { { iid: alert_1.iid } }
+ let(:params) { { iid: resolved_alert.iid } }
- it { is_expected.to match_array(alert_1) }
+ it { is_expected.to match_array(resolved_alert) }
context 'unknown iid' do
let(:params) { { iid: 'unknown' } }
@@ -41,13 +41,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'status given' do
let(:params) { { status: AlertManagement::Alert::STATUSES[:resolved] } }
- it { is_expected.to match_array(alert_1) }
+ it { is_expected.to match_array(resolved_alert) }
context 'with an array of statuses' do
- let(:alert_3) { create(:alert_management_alert) }
+ let(:triggered_alert) { create(:alert_management_alert) }
let(:params) { { status: [AlertManagement::Alert::STATUSES[:resolved]] } }
- it { is_expected.to match_array(alert_1) }
+ it { is_expected.to match_array(resolved_alert) }
end
context 'with no alerts of status' do
@@ -59,13 +59,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'with an empty status array' do
let(:params) { { status: [] } }
- it { is_expected.to match_array([alert_1, alert_2]) }
+ it { is_expected.to match_array([resolved_alert, ignored_alert]) }
end
context 'with an nil status' do
let(:params) { { status: nil } }
- it { is_expected.to match_array([alert_1, alert_2]) }
+ it { is_expected.to match_array([resolved_alert, ignored_alert]) }
end
end
@@ -74,13 +74,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'sorts alerts ascending' do
let(:params) { { sort: 'created_asc' } }
- it { is_expected.to eq [alert_1, alert_2] }
+ it { is_expected.to eq [resolved_alert, ignored_alert] }
end
context 'sorts alerts descending' do
let(:params) { { sort: 'created_desc' } }
- it { is_expected.to eq [alert_2, alert_1] }
+ it { is_expected.to eq [ignored_alert, resolved_alert] }
end
end
@@ -88,13 +88,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'sorts alerts ascending' do
let(:params) { { sort: 'updated_asc' } }
- it { is_expected.to eq [alert_1, alert_2] }
+ it { is_expected.to eq [resolved_alert, ignored_alert] }
end
context 'sorts alerts descending' do
let(:params) { { sort: 'updated_desc' } }
- it { is_expected.to eq [alert_2, alert_1] }
+ it { is_expected.to eq [ignored_alert, resolved_alert] }
end
end
@@ -102,13 +102,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'sorts alerts ascending' do
let(:params) { { sort: 'started_at_asc' } }
- it { is_expected.to eq [alert_1, alert_2] }
+ it { is_expected.to eq [resolved_alert, ignored_alert] }
end
context 'sorts alerts descending' do
let(:params) { { sort: 'started_at_desc' } }
- it { is_expected.to eq [alert_2, alert_1] }
+ it { is_expected.to eq [ignored_alert, resolved_alert] }
end
end
@@ -116,13 +116,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'sorts alerts ascending' do
let(:params) { { sort: 'ended_at_asc' } }
- it { is_expected.to eq [alert_1, alert_2] }
+ it { is_expected.to eq [resolved_alert, ignored_alert] }
end
context 'sorts alerts descending' do
let(:params) { { sort: 'ended_at_desc' } }
- it { is_expected.to eq [alert_2, alert_1] }
+ it { is_expected.to eq [ignored_alert, resolved_alert] }
end
end
@@ -133,13 +133,13 @@ RSpec.describe AlertManagement::AlertsFinder, '#execute' do
context 'sorts alerts ascending' do
let(:params) { { sort: 'event_count_asc' } }
- it { is_expected.to eq [alert_2, alert_1, alert_count_3, alert_count_6] }
+ it { is_expected.to eq [ignored_alert, resolved_alert, alert_count_3, alert_count_6] }
end
context 'sorts alerts descending' do
let(:params) { { sort: 'event_count_desc' } }
- it { is_expected.to eq [alert_count_6, alert_count_3, alert_1, alert_2] }
+ it { is_expected.to eq [alert_count_6, alert_count_3, resolved_alert, ignored_alert] }
end
end
diff --git a/spec/finders/autocomplete/move_to_project_finder_spec.rb b/spec/finders/autocomplete/move_to_project_finder_spec.rb
index 61328a5335a..fb2de908777 100644
--- a/spec/finders/autocomplete/move_to_project_finder_spec.rb
+++ b/spec/finders/autocomplete/move_to_project_finder_spec.rb
@@ -22,14 +22,14 @@ RSpec.describe Autocomplete::MoveToProjectFinder do
expect(finder.execute).to be_empty
end
- it 'returns projects equal or above Gitlab::Access::REPORTER ordered by name' do
+ it 'returns projects equal or above Gitlab::Access::REPORTER' do
reporter_project.add_reporter(user)
developer_project.add_developer(user)
maintainer_project.add_maintainer(user)
finder = described_class.new(user, project_id: project.id)
- expect(finder.execute.to_a).to eq([reporter_project, developer_project, maintainer_project])
+ expect(finder.execute.to_a).to contain_exactly(reporter_project, developer_project, maintainer_project)
end
it 'does not include the source project' do
@@ -53,7 +53,7 @@ RSpec.describe Autocomplete::MoveToProjectFinder do
it 'does not return projects for which issues are disabled' do
reporter_project.add_reporter(user)
- reporter_project.update(issues_enabled: false)
+ reporter_project.update!(issues_enabled: false)
other_reporter_project = create(:project)
other_reporter_project.add_reporter(user)
@@ -88,10 +88,10 @@ RSpec.describe Autocomplete::MoveToProjectFinder do
wadus_project.add_maintainer(user)
expect(described_class.new(user, project_id: project.id).execute.to_a)
- .to eq([foo_project, wadus_project])
+ .to contain_exactly(foo_project, wadus_project)
expect(described_class.new(user, project_id: project.id, search: 'wadus').execute.to_a)
- .to eq([wadus_project])
+ .to contain_exactly(wadus_project)
end
it 'allows searching by parent namespace' do
diff --git a/spec/finders/ci/daily_build_group_report_results_finder_spec.rb b/spec/finders/ci/daily_build_group_report_results_finder_spec.rb
index bdb0bc9b561..c0434b5f371 100644
--- a/spec/finders/ci/daily_build_group_report_results_finder_spec.rb
+++ b/spec/finders/ci/daily_build_group_report_results_finder_spec.rb
@@ -59,6 +59,8 @@ RSpec.describe Ci::DailyBuildGroupReportResultsFinder do
end
end
+ private
+
def create_daily_coverage(group_name, coverage, date)
create(
:ci_daily_build_group_report_result,
diff --git a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
index ca6e0793d55..196fde5efe0 100644
--- a/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
+++ b/spec/finders/ci/pipelines_for_merge_request_finder_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Ci::PipelinesForMergeRequestFinder do
context 'with multiple irrelevant merge_request_diffs' do
before do
- merge_request.update(target_branch: 'v1.0.0')
+ merge_request.update!(target_branch: 'v1.0.0')
end
it_behaves_like 'returning pipelines with proper ordering'
diff --git a/spec/finders/design_management/designs_finder_spec.rb b/spec/finders/design_management/designs_finder_spec.rb
index 696327cc49c..0133095827d 100644
--- a/spec/finders/design_management/designs_finder_spec.rb
+++ b/spec/finders/design_management/designs_finder_spec.rb
@@ -8,9 +8,9 @@ RSpec.describe DesignManagement::DesignsFinder do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:design1) { create(:design, :with_file, issue: issue, versions_count: 1) }
- let_it_be(:design2) { create(:design, :with_file, issue: issue, versions_count: 1) }
- let_it_be(:design3) { create(:design, :with_file, issue: issue, versions_count: 1) }
+ let_it_be(:design1) { create(:design, :with_file, issue: issue, versions_count: 1, relative_position: 3) }
+ let_it_be(:design2) { create(:design, :with_file, issue: issue, versions_count: 1, relative_position: 2) }
+ let_it_be(:design3) { create(:design, :with_file, issue: issue, versions_count: 1, relative_position: 1) }
let(:params) { {} }
subject(:designs) { described_class.new(issue, user, params).execute }
@@ -38,8 +38,28 @@ RSpec.describe DesignManagement::DesignsFinder do
enable_design_management
end
- it 'returns the designs' do
- is_expected.to contain_exactly(design1, design2, design3)
+ it 'returns the designs sorted by their relative position' do
+ is_expected.to eq([design3, design2, design1])
+ end
+
+ context 'when the :reorder_designs feature is enabled for the project' do
+ before do
+ stub_feature_flags(reorder_designs: project)
+ end
+
+ it 'returns the designs sorted by their relative position' do
+ is_expected.to eq([design3, design2, design1])
+ end
+ end
+
+ context 'when the :reorder_designs feature is disabled' do
+ before do
+ stub_feature_flags(reorder_designs: false)
+ end
+
+ it 'returns the designs sorted by ID' do
+ is_expected.to eq([design1, design2, design3])
+ end
end
context 'when argument is the ids of designs' do
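# Sketch of how actor-scoped stub_feature_flags behaves in GitLab specs, as
# relied on above: passing a record enables the flag for that actor only, while
# `false` disables it for everyone. `other_project` is a hypothetical second
# record used purely for contrast.
stub_feature_flags(reorder_designs: project)

Feature.enabled?(:reorder_designs, project)       # => true
Feature.enabled?(:reorder_designs, other_project) # => false

stub_feature_flags(reorder_designs: false)

Feature.enabled?(:reorder_designs, project)       # => false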
diff --git a/spec/finders/fork_targets_finder_spec.rb b/spec/finders/fork_targets_finder_spec.rb
index 3c66f4e5757..7208f46cfff 100644
--- a/spec/finders/fork_targets_finder_spec.rb
+++ b/spec/finders/fork_targets_finder_spec.rb
@@ -10,15 +10,19 @@ RSpec.describe ForkTargetsFinder do
let!(:maintained_group) do
create(:group).tap { |g| g.add_maintainer(user) }
end
+
let!(:owned_group) do
create(:group).tap { |g| g.add_owner(user) }
end
+
let!(:developer_group) do
create(:group).tap { |g| g.add_developer(user) }
end
+
let!(:reporter_group) do
create(:group).tap { |g| g.add_reporter(user) }
end
+
let!(:guest_group) do
create(:group).tap { |g| g.add_guest(user) }
end
diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb
index 77ef546e083..2f9303606b1 100644
--- a/spec/finders/group_descendants_finder_spec.rb
+++ b/spec/finders/group_descendants_finder_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe GroupDescendantsFinder do
it 'does not include projects shared with the group' do
project = create(:project, namespace: group)
other_project = create(:project)
- other_project.project_group_links.create(group: group,
+ other_project.project_group_links.create!(group: group,
group_access: Gitlab::Access::MAINTAINER)
expect(finder.execute).to contain_exactly(project)
diff --git a/spec/finders/group_projects_finder_spec.rb b/spec/finders/group_projects_finder_spec.rb
index 14f2bb017c6..c66fdb19260 100644
--- a/spec/finders/group_projects_finder_spec.rb
+++ b/spec/finders/group_projects_finder_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe GroupProjectsFinder do
let!(:shared_project_4) { create(:project, :internal, path: '8') }
before do
- shared_project_4.project_group_links.create(group_access: Gitlab::Access::REPORTER, group: group)
+ shared_project_4.project_group_links.create!(group_access: Gitlab::Access::REPORTER, group: group)
end
let(:params) { { min_access_level: Gitlab::Access::MAINTAINER } }
@@ -76,7 +76,7 @@ RSpec.describe GroupProjectsFinder do
context "with external user" do
before do
- current_user.update(external: true)
+ current_user.update!(external: true)
end
it { is_expected.to match_array([shared_project_2, shared_project_1]) }
@@ -107,7 +107,7 @@ RSpec.describe GroupProjectsFinder do
context "with external user" do
before do
- current_user.update(external: true)
+ current_user.update!(external: true)
end
context 'with subgroups projects' do
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index 672318c292e..fb7d4e808fe 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -185,9 +185,9 @@ RSpec.describe IssuesFinder do
let(:params) { { milestone_title: group_milestone.title } }
before do
- project2.update(namespace: group)
- issue2.update(milestone: group_milestone)
- issue3.update(milestone: group_milestone)
+ project2.update!(namespace: group)
+ issue2.update!(milestone: group_milestone)
+ issue3.update!(milestone: group_milestone)
end
it 'returns issues assigned to that group milestone' do
@@ -668,6 +668,58 @@ RSpec.describe IssuesFinder do
end
end
+ context 'filtering by issue type' do
+ let_it_be(:incident_issue) { create(:incident, project: project1) }
+
+ context 'no type given' do
+ let(:params) { { issue_types: [] } }
+
+ it 'returns all issues' do
+ expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4)
+ end
+ end
+
+ context 'incident type' do
+ let(:params) { { issue_types: ['incident'] } }
+
+ it 'returns incident issues' do
+ expect(issues).to contain_exactly(incident_issue)
+ end
+ end
+
+ context 'issue type' do
+ let(:params) { { issue_types: ['issue'] } }
+
+ it 'returns all issues with type issue' do
+ expect(issues).to contain_exactly(issue1, issue2, issue3, issue4)
+ end
+ end
+
+ context 'multiple params' do
+ let(:params) { { issue_types: %w(issue incident) } }
+
+ it 'returns all issues' do
+ expect(issues).to contain_exactly(incident_issue, issue1, issue2, issue3, issue4)
+ end
+ end
+
+ context 'without array' do
+ let(:params) { { issue_types: 'incident' } }
+
+ it 'returns incident issues' do
+ expect(issues).to contain_exactly(incident_issue)
+ end
+ end
+
+ context 'invalid params' do
+ let(:params) { { issue_types: ['nonsense'] } }
+
+ it 'returns no issues' do
+ expect(issues).to eq(Issue.none)
+ end
+ end
+ end
+
context 'when the user is unauthorized' do
let(:search_user) { nil }
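# Illustrative sketch only (not IssuesFinder's real code) of the behaviour the
# issue-type contexts above pin down: types arrive as a string or an array, an
# empty list means no filtering, and an unknown type short-circuits to an empty
# relation.
ISSUE_TYPES_SKETCH = %w[issue incident].freeze

def filter_by_issue_type_sketch(relation, issue_types)
  types = Array(issue_types).map(&:to_s)
  return relation if types.empty?
  return relation.none unless types.all? { |type| ISSUE_TYPES_SKETCH.include?(type) }

  relation.where(issue_type: types)
end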
diff --git a/spec/finders/joined_groups_finder_spec.rb b/spec/finders/joined_groups_finder_spec.rb
index 8f826ef67ec..058db735708 100644
--- a/spec/finders/joined_groups_finder_spec.rb
+++ b/spec/finders/joined_groups_finder_spec.rb
@@ -55,7 +55,7 @@ RSpec.describe JoinedGroupsFinder do
context 'external users' do
before do
- profile_visitor.update(external: true)
+ profile_visitor.update!(external: true)
end
context 'if not a member' do
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index b14ad84a96e..3ef8d6a01aa 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -10,16 +10,39 @@ RSpec.describe MembersFinder, '#execute' do
let_it_be(:user2) { create(:user) }
let_it_be(:user3) { create(:user) }
let_it_be(:user4) { create(:user) }
+ let_it_be(:blocked_user) { create(:user, :blocked) }
it 'returns members for project and parent groups' do
nested_group.request_access(user1)
member1 = group.add_maintainer(user2)
member2 = nested_group.add_maintainer(user3)
member3 = project.add_maintainer(user4)
+ blocked_member = project.add_maintainer(blocked_user)
result = described_class.new(project, user2).execute
- expect(result).to contain_exactly(member1, member2, member3)
+ expect(result).to contain_exactly(member1, member2, member3, blocked_member)
+ end
+
+ it 'returns owners and maintainers' do
+ member1 = group.add_owner(user1)
+ group.add_developer(user2)
+ member3 = project.add_maintainer(user3)
+ project.add_developer(user4)
+
+ result = described_class.new(project, user2, params: { owners_and_maintainers: true }).execute
+
+ expect(result).to contain_exactly(member1, member3)
+ end
+
+ it 'returns active users and excludes invited users' do
+ member1 = project.add_maintainer(user2)
+ create(:project_member, :invited, project: project, invite_email: create(:user).email)
+ project.add_maintainer(blocked_user)
+
+ result = described_class.new(project, user2, params: { active_without_invites_and_requests: true }).execute
+
+ expect(result).to contain_exactly(member1)
end
it 'includes only non-invite members if user does not have admin permissions on project' do
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index e3643698012..5b86c891e47 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -85,6 +85,31 @@ RSpec.describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request5)
end
+ context 'filters by merged_at date' do
+ before do
+ merge_request1.metrics.update!(merged_at: 5.days.ago)
+ merge_request2.metrics.update!(merged_at: 10.days.ago)
+ end
+
+ describe 'merged_after' do
+ subject { described_class.new(user, merged_after: 6.days.ago).execute }
+
+ it { is_expected.to eq([merge_request1]) }
+ end
+
+ describe 'merged_before' do
+ subject { described_class.new(user, merged_before: 6.days.ago).execute }
+
+ it { is_expected.to eq([merge_request2]) }
+ end
+
+ describe 'when both merged_after and merged_before is given' do
+ subject { described_class.new(user, merged_after: 15.days.ago, merged_before: 6.days.ago).execute }
+
+ it { is_expected.to eq([merge_request2]) }
+ end
+ end
+
context 'filtering by group' do
it 'includes all merge requests when user has access excluding merge requests from projects the user does not have access to' do
private_project = allow_gitaly_n_plus_1 { create(:project, :private, group: group) }
@@ -192,43 +217,59 @@ RSpec.describe MergeRequestsFinder do
expect(merge_requests).to contain_exactly(merge_request3)
end
- describe 'WIP state' do
+ describe 'draft state' do
let!(:wip_merge_request1) { create(:merge_request, :simple, author: user, source_project: project5, target_project: project5, title: 'WIP: thing') }
let!(:wip_merge_request2) { create(:merge_request, :simple, author: user, source_project: project6, target_project: project6, title: 'wip thing') }
let!(:wip_merge_request3) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project1, title: '[wip] thing') }
let!(:wip_merge_request4) { create(:merge_request, :simple, author: user, source_project: project1, target_project: project2, title: 'wip: thing') }
+ let!(:draft_merge_request1) { create(:merge_request, :simple, author: user, source_branch: 'draft1', source_project: project5, target_project: project5, title: 'Draft: thing') }
+ let!(:draft_merge_request2) { create(:merge_request, :simple, author: user, source_branch: 'draft2', source_project: project6, target_project: project6, title: '[draft] thing') }
+ let!(:draft_merge_request3) { create(:merge_request, :simple, author: user, source_branch: 'draft3', source_project: project1, target_project: project1, title: '(draft) thing') }
+ let!(:draft_merge_request4) { create(:merge_request, :simple, author: user, source_branch: 'draft4', source_project: project1, target_project: project2, title: 'Draft - thing') }
- it 'filters by wip' do
- params = { wip: 'yes' }
+ [:wip, :draft].each do |draft_param_key|
+ it "filters by #{draft_param_key}" do
+ params = { draft_param_key => 'yes' }
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(merge_request4, merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4)
- end
+ expect(merge_requests).to contain_exactly(
+ merge_request4, merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4,
+ draft_merge_request1, draft_merge_request2, draft_merge_request3, draft_merge_request4
+ )
+ end
- it 'filters by not wip' do
- params = { wip: 'no' }
+ context 'when merge_request_draft_filter is disabled' do
+ it 'does not include draft merge requests' do
+ stub_feature_flags(merge_request_draft_filter: false)
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, { draft_param_key => 'yes' }).execute
- expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3)
- end
+ expect(merge_requests).to contain_exactly(
+ merge_request4, merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4
+ )
+ end
+ end
- it 'returns all items if no valid wip param exists' do
- params = { wip: '' }
+ it "filters by not #{draft_param_key}" do
+ params = { draft_param_key => 'no' }
- merge_requests = described_class.new(user, params).execute
+ merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(
- merge_request1, merge_request2, merge_request3, merge_request4,
- merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3,
- wip_merge_request4)
- end
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2, merge_request3)
+ end
- it 'adds wip to scalar params' do
- scalar_params = described_class.scalar_params
+ it "returns all items if no valid #{draft_param_key} param exists" do
+ params = { draft_param_key => '' }
+
+ merge_requests = described_class.new(user, params).execute
- expect(scalar_params).to include(:wip, :assignee_id)
+ expect(merge_requests).to contain_exactly(
+ merge_request1, merge_request2, merge_request3, merge_request4,
+ merge_request5, wip_merge_request1, wip_merge_request2, wip_merge_request3, wip_merge_request4,
+ draft_merge_request1, draft_merge_request2, draft_merge_request3, draft_merge_request4
+ )
+ end
end
context 'filter by deployment' do
@@ -265,6 +306,14 @@ RSpec.describe MergeRequestsFinder do
end
end
+ describe '.scalar_params' do
+ it 'contains scalar params related to merge requests' do
+ scalar_params = described_class.scalar_params
+
+ expect(scalar_params).to include(:wip, :draft, :assignee_id)
+ end
+ end
+
context 'assignee filtering' do
let(:issuables) { described_class.new(user, params).execute }
@@ -311,9 +360,8 @@ RSpec.describe MergeRequestsFinder do
let(:group_milestone) { create(:milestone, group: group) }
before do
- project2.update(namespace: group)
- merge_request2.update(milestone: group_milestone)
- merge_request3.update(milestone: group_milestone)
+ merge_request1.update!(milestone: group_milestone)
+ merge_request2.update!(milestone: group_milestone)
end
it 'returns merge requests assigned to that group milestone' do
@@ -321,7 +369,7 @@ RSpec.describe MergeRequestsFinder do
merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(merge_request2, merge_request3)
+ expect(merge_requests).to contain_exactly(merge_request1, merge_request2)
end
context 'using NOT' do
@@ -330,7 +378,7 @@ RSpec.describe MergeRequestsFinder do
it 'returns MRs not assigned to that group milestone' do
merge_requests = described_class.new(user, params).execute
- expect(merge_requests).to contain_exactly(merge_request1, merge_request4, merge_request5)
+ expect(merge_requests).to contain_exactly(merge_request3, merge_request4, merge_request5)
end
end
end
diff --git a/spec/finders/milestones_finder_spec.rb b/spec/finders/milestones_finder_spec.rb
index 5920c185c64..6e486671132 100644
--- a/spec/finders/milestones_finder_spec.rb
+++ b/spec/finders/milestones_finder_spec.rb
@@ -56,6 +56,14 @@ RSpec.describe MilestonesFinder do
milestone_3.close
end
+ it 'filters by id' do
+ params[:ids] = [milestone_1.id, milestone_2.id]
+
+ result = described_class.new(params).execute
+
+ expect(result).to contain_exactly(milestone_1, milestone_2)
+ end
+
it 'filters by active state' do
params[:state] = 'active'
result = described_class.new(params).execute
diff --git a/spec/finders/personal_access_tokens_finder_spec.rb b/spec/finders/personal_access_tokens_finder_spec.rb
index 94954f4153b..c8913329839 100644
--- a/spec/finders/personal_access_tokens_finder_spec.rb
+++ b/spec/finders/personal_access_tokens_finder_spec.rb
@@ -3,13 +3,14 @@
require 'spec_helper'
RSpec.describe PersonalAccessTokensFinder do
- def finder(options = {})
- described_class.new(options)
+ def finder(options = {}, current_user = nil)
+ described_class.new(options, current_user)
end
describe '#execute' do
let(:user) { create(:user) }
let(:params) { {} }
+ let(:current_user) { nil }
let!(:active_personal_access_token) { create(:personal_access_token, user: user) }
let!(:expired_personal_access_token) { create(:personal_access_token, :expired, user: user) }
let!(:revoked_personal_access_token) { create(:personal_access_token, :revoked, user: user) }
@@ -17,7 +18,42 @@ RSpec.describe PersonalAccessTokensFinder do
let!(:expired_impersonation_token) { create(:personal_access_token, :expired, :impersonation, user: user) }
let!(:revoked_impersonation_token) { create(:personal_access_token, :revoked, :impersonation, user: user) }
- subject { finder(params).execute }
+ subject { finder(params, current_user).execute }
+
+ context 'when current_user is defined' do
+ let(:current_user) { create(:admin) }
+ let(:params) { { user: user } }
+
+ context 'current_user is allowed to read PATs' do
+ it do
+ is_expected.to contain_exactly(active_personal_access_token, active_impersonation_token,
+ revoked_personal_access_token, expired_personal_access_token,
+ revoked_impersonation_token, expired_impersonation_token)
+ end
+ end
+
+ context 'current_user is not allowed to read PATs' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when user param is not set' do
+ let(:params) { {} }
+
+ it do
+ is_expected.to contain_exactly(active_personal_access_token, active_impersonation_token,
+ revoked_personal_access_token, expired_personal_access_token,
+ revoked_impersonation_token, expired_impersonation_token)
+ end
+
+ context 'when current_user is not an administrator' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_empty }
+ end
+ end
+ end
describe 'without user' do
it do
diff --git a/spec/finders/personal_projects_finder_spec.rb b/spec/finders/personal_projects_finder_spec.rb
index 62e9999fdd6..493ec0e569e 100644
--- a/spec/finders/personal_projects_finder_spec.rb
+++ b/spec/finders/personal_projects_finder_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe PersonalProjectsFinder do
context 'external' do
before do
- current_user.update(external: true)
+ current_user.update!(external: true)
end
it { is_expected.to eq([public_project, private_project]) }
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index bd71a8186ad..29b6dc61386 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -244,8 +244,8 @@ RSpec.describe ProjectsFinder, :do_not_mock_admin_mode do
let(:params) { { last_activity_after: 60.minutes.ago } }
before do
- internal_project.update(last_activity_at: Time.now)
- public_project.update(last_activity_at: 61.minutes.ago)
+ internal_project.update!(last_activity_at: Time.now)
+ public_project.update!(last_activity_at: 61.minutes.ago)
end
it { is_expected.to match_array([internal_project]) }
@@ -255,8 +255,8 @@ RSpec.describe ProjectsFinder, :do_not_mock_admin_mode do
let(:params) { { last_activity_before: 60.minutes.ago } }
before do
- internal_project.update(last_activity_at: Time.now)
- public_project.update(last_activity_at: 61.minutes.ago)
+ internal_project.update!(last_activity_at: Time.now)
+ public_project.update!(last_activity_at: 61.minutes.ago)
end
it { is_expected.to match_array([public_project]) }
diff --git a/spec/finders/releases_finder_spec.rb b/spec/finders/releases_finder_spec.rb
index 3dc01570d64..e8049a9eb81 100644
--- a/spec/finders/releases_finder_spec.rb
+++ b/spec/finders/releases_finder_spec.rb
@@ -3,29 +3,68 @@
require 'spec_helper'
RSpec.describe ReleasesFinder do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:params) { {} }
+ let(:user) { create(:user) }
+ let(:group) { create :group }
+ let(:project) { create(:project, :repository, group: group) }
+ let(:params) { {} }
+ let(:args) { {} }
let(:repository) { project.repository }
let(:v1_0_0) { create(:release, project: project, tag: 'v1.0.0') }
let(:v1_1_0) { create(:release, project: project, tag: 'v1.1.0') }
- let(:finder) { described_class.new(project, user, params) }
before do
v1_0_0.update_attribute(:released_at, 2.days.ago)
v1_1_0.update_attribute(:released_at, 1.day.ago)
end
- describe '#execute' do
- subject { finder.execute(**args) }
+ shared_examples_for 'when the user is not part of the project' do
+ it 'returns no releases' do
+ is_expected.to be_empty
+ end
+ end
+
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27716
+ shared_examples_for 'when tag is nil' do
+ before do
+ v1_0_0.update_column(:tag, nil)
+ end
+
+ it 'ignores rows with a nil tag' do
+ expect(subject.size).to eq(1)
+ expect(subject).to eq([v1_1_0])
+ end
+ end
+
+ shared_examples_for 'when a tag parameter is passed' do
+ let(:params) { { tag: 'v1.0.0' } }
+
+ it 'only returns the release with the matching tag' do
+ expect(subject).to eq([v1_0_0])
+ end
+ end
+
+ shared_examples_for 'preload' do
+ it 'preloads associations' do
+ expect(Release).to receive(:preloaded).once.and_call_original
+
+ subject
+ end
+
+ context 'when preload is false' do
+ let(:args) { { preload: false } }
- let(:args) { {} }
+ it 'does not preload associations' do
+ expect(Release).not_to receive(:preloaded)
- context 'when the user is not part of the project' do
- it 'returns no releases' do
- is_expected.to be_empty
+ subject
end
end
+ end
+
+ describe 'when parent is a project' do
+ subject { described_class.new(project, user, params).execute(**args) }
+
+ it_behaves_like 'when the user is not part of the project'
context 'when the user is a project developer' do
before do
@@ -38,39 +77,137 @@ RSpec.describe ReleasesFinder do
expect(subject).to eq([v1_1_0, v1_0_0])
end
- it 'preloads associations' do
- expect(Release).to receive(:preloaded).once.and_call_original
+ it_behaves_like 'preload'
+ it_behaves_like 'when tag is nil'
+ it_behaves_like 'when a tag parameter is passed'
+ end
+ end
- subject
- end
+ describe 'when parent is a group' do
+ context 'without subgroups' do
+ let(:project2) { create(:project, :repository, namespace: group) }
+ let!(:v6) { create(:release, project: project2, tag: 'v6') }
- context 'when preload is false' do
- let(:args) { { preload: false } }
+ subject { described_class.new(group, user, params).execute(**args) }
- it 'does not preload associations' do
- expect(Release).not_to receive(:preloaded)
+ it_behaves_like 'when the user is not part of the project'
+
+ context 'when the user is a project developer on one sibling project' do
+ before do
+ project.add_developer(user)
+ v1_0_0.update_attribute(:released_at, 3.days.ago)
+ v1_1_0.update_attribute(:released_at, 1.day.ago)
+ end
- subject
+ it 'sorts by release date' do
+ expect(subject.size).to eq(2)
+ expect(subject).to eq([v1_1_0, v1_0_0])
end
end
- # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/27716
- context 'when tag is nil' do
+ context 'when the user is a project developer on all projects' do
before do
- v1_0_0.update_column(:tag, nil)
+ project.add_developer(user)
+ project2.add_developer(user)
+ v1_0_0.update_attribute(:released_at, 3.days.ago)
+ v6.update_attribute(:released_at, 2.days.ago)
+ v1_1_0.update_attribute(:released_at, 1.day.ago)
end
- it 'ignores rows with a nil tag' do
- expect(subject.size).to eq(1)
- expect(subject).to eq([v1_1_0])
+ it 'sorts by release date' do
+ expect(subject.size).to eq(3)
+ expect(subject).to eq([v1_1_0, v6, v1_0_0])
+ end
+
+ it_behaves_like 'when a tag parameter is passed'
+ end
+ end
+
+ describe 'with subgroups' do
+ let(:params) { { include_subgroups: true } }
+
+ subject { described_class.new(group, user, params).execute(**args) }
+
+ context 'with a single-level subgroup' do
+ let(:subgroup) { create :group, parent: group }
+ let(:project2) { create(:project, :repository, namespace: subgroup) }
+ let!(:v6) { create(:release, project: project2, tag: 'v6') }
+
+ it_behaves_like 'when the user is not part of the project'
+
+      context 'when the user is a project developer in the subgroup project' do
+ before do
+ project2.add_developer(user)
+ end
+
+ it 'returns only the subgroup releases' do
+ expect(subject).to match_array([v6])
+ end
+ end
+
+      context 'when the user is a project developer in both projects' do
+ before do
+ project.add_developer(user)
+ project2.add_developer(user)
+ v6.update_attribute(:released_at, 2.days.ago)
+ end
+
+ it 'returns all releases' do
+ expect(subject).to match_array([v1_1_0, v1_0_0, v6])
+ end
+
+ it_behaves_like 'when a tag parameter is passed'
end
end
- context 'when a tag parameter is passed' do
- let(:params) { { tag: 'v1.0.0' } }
+ context 'with a multi-level subgroup' do
+ let(:subgroup) { create :group, parent: group }
+ let(:subsubgroup) { create :group, parent: subgroup }
+ let(:project2) { create(:project, :repository, namespace: subgroup) }
+ let(:project3) { create(:project, :repository, namespace: subsubgroup) }
+ let!(:v6) { create(:release, project: project2, tag: 'v6') }
+ let!(:p3) { create(:release, project: project3, tag: 'p3') }
+
+ before do
+ v6.update_attribute(:released_at, 2.days.ago)
+ p3.update_attribute(:released_at, 3.days.ago)
+ end
+
+ it_behaves_like 'when the user is not part of the project'
+
+      context 'when the user is a project developer in the subgroup and subsubgroup project' do
+ before do
+ project2.add_developer(user)
+ project3.add_developer(user)
+ end
+
+ it 'returns only the subgroup and subsubgroup releases' do
+ expect(subject).to match_array([v6, p3])
+ end
+ end
+
+      context 'when the user is a project developer in the subsubgroup project' do
+ before do
+ project3.add_developer(user)
+ end
+
+ it 'returns only the subsubgroup releases' do
+ expect(subject).to match_array([p3])
+ end
+ end
+
+      context 'when the user is a project developer in all projects' do
+ before do
+ project.add_developer(user)
+ project2.add_developer(user)
+ project3.add_developer(user)
+ end
+
+ it 'returns all releases' do
+ expect(subject).to match_array([v1_1_0, v6, v1_0_0, p3])
+ end
- it 'only returns the release with the matching tag' do
- expect(subject).to eq([v1_0_0])
+ it_behaves_like 'when a tag parameter is passed'
end
end
end
diff --git a/spec/finders/template_finder_spec.rb b/spec/finders/template_finder_spec.rb
index 34f81e249e2..0fdd6ab402d 100644
--- a/spec/finders/template_finder_spec.rb
+++ b/spec/finders/template_finder_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe TemplateFinder do
:dockerfiles | described_class
:gitignores | described_class
:gitlab_ci_ymls | described_class
- :licenses | ::LicenseTemplateFinder
+ :licenses | ::LicenseTemplateFinder
+ :metrics_dashboard_ymls | described_class
end
with_them do
@@ -28,6 +29,7 @@ RSpec.describe TemplateFinder do
:dockerfiles | 'Binary'
:gitignores | 'Actionscript'
:gitlab_ci_ymls | 'Android'
+ :metrics_dashboard_ymls | 'Default'
end
with_them do
diff --git a/spec/finders/todos_finder_spec.rb b/spec/finders/todos_finder_spec.rb
index f6796398782..577ad80ede1 100644
--- a/spec/finders/todos_finder_spec.rb
+++ b/spec/finders/todos_finder_spec.rb
@@ -4,14 +4,14 @@ require 'spec_helper'
RSpec.describe TodosFinder do
describe '#execute' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project) { create(:project, namespace: group) }
- let(:issue) { create(:issue, project: project) }
- let(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
let(:finder) { described_class }
- before do
+ before_all do
group.add_developer(user)
end
@@ -89,8 +89,6 @@ RSpec.describe TodosFinder do
end
it 'raises an argument error when invalid type is passed' do
- create(:todo, user: user, group: group, target: create(:design))
-
todos_finder = finder.new(user, { type: %w[Issue MergeRequest NotAValidType] })
expect { todos_finder.execute }.to raise_error(ArgumentError)
@@ -131,8 +129,8 @@ RSpec.describe TodosFinder do
end
context 'when filtering by author' do
- let(:author1) { create(:user) }
- let(:author2) { create(:user) }
+ let_it_be(:author1) { create(:user) }
+ let_it_be(:author2) { create(:user) }
let!(:todo1) { create(:todo, user: user, author: author1) }
let!(:todo2) { create(:todo, user: user, author: author2) }
@@ -154,7 +152,7 @@ RSpec.describe TodosFinder do
context 'by groups' do
context 'with subgroups' do
- let(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
let!(:todo3) { create(:todo, user: user, group: subgroup, target: issue) }
it 'returns todos from subgroups when filtered by a group' do
@@ -167,17 +165,14 @@ RSpec.describe TodosFinder do
context 'filtering for multiple groups' do
let_it_be(:group2) { create(:group) }
let_it_be(:group3) { create(:group) }
+ let_it_be(:subgroup1) { create(:group, parent: group) }
+ let_it_be(:subgroup2) { create(:group, parent: group2) }
let!(:todo1) { create(:todo, user: user, project: project, target: issue) }
let!(:todo2) { create(:todo, user: user, group: group, target: merge_request) }
let!(:todo3) { create(:todo, user: user, group: group2, target: merge_request) }
-
- let(:subgroup1) { create(:group, parent: group) }
let!(:todo4) { create(:todo, user: user, group: subgroup1, target: issue) }
-
- let(:subgroup2) { create(:group, parent: group2) }
let!(:todo5) { create(:todo, user: user, group: subgroup2, target: issue) }
-
let!(:todo6) { create(:todo, user: user, group: group3, target: issue) }
it 'returns the expected groups' do
@@ -232,6 +227,29 @@ RSpec.describe TodosFinder do
expect(todos).to match_array([todo2, todo1])
end
end
+
+ context 'when filtering by target id' do
+ it 'returns the expected todos for the target' do
+ todos = finder.new(user, { type: 'Issue', target_id: issue.id }).execute
+
+ expect(todos).to match_array([todo1])
+ end
+
+ it 'returns the expected todos for multiple target ids' do
+ another_issue = create(:issue, project: project)
+ todo3 = create(:todo, user: user, project: project, target: another_issue)
+
+ todos = finder.new(user, { type: 'Issue', target_id: [issue.id, another_issue.id] }).execute
+
+ expect(todos).to match_array([todo1, todo3])
+ end
+
+ it 'returns the expected todos for empty target id collection' do
+ todos = finder.new(user, { target_id: [] }).execute
+
+ expect(todos).to match_array([todo1, todo2])
+ end
+ end
end
context 'external authorization' do
@@ -307,9 +325,9 @@ RSpec.describe TodosFinder do
it 'returns the expected types' do
expected_result =
if Gitlab.ee?
- %w[Epic Issue MergeRequest DesignManagement::Design]
+ %w[Epic Issue MergeRequest DesignManagement::Design AlertManagement::Alert]
else
- %w[Issue MergeRequest DesignManagement::Design]
+ %w[Issue MergeRequest DesignManagement::Design AlertManagement::Alert]
end
expect(described_class.todo_types).to contain_exactly(*expected_result)
diff --git a/spec/finders/uploader_finder_spec.rb b/spec/finders/uploader_finder_spec.rb
index 814d4b88b57..e1488f18d8a 100644
--- a/spec/finders/uploader_finder_spec.rb
+++ b/spec/finders/uploader_finder_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe UploaderFinder do
subject { described_class.new(project, secret, file_name).execute }
before do
- upload.save
+ upload.save!
end
context 'when successful' do
@@ -32,7 +32,7 @@ RSpec.describe UploaderFinder do
context 'when path traversal in file name' do
before do
upload.path = '/uploads/11111111111111111111111111111111/../../../../../../../../../../../../../../etc/passwd)'
- upload.save
+ upload.save!
end
it 'returns nil' do
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index f42d701834a..d1274bea817 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -33,6 +33,7 @@
"updated_at": { "type": "string", "format": "date-time" },
"auto_stop_at": { "type": "string", "format": "date-time" },
"can_stop": { "type": "boolean" },
+ "has_opened_alert": { "type": "boolean" },
"cluster_type": { "type": "types/nullable_string.json" },
"terminal_path": { "type": "types/nullable_string.json" },
"last_deployment": {
diff --git a/spec/fixtures/api/schemas/job/artifact.json b/spec/fixtures/api/schemas/job/artifact.json
index 1812e69fbd6..f4a69e66141 100644
--- a/spec/fixtures/api/schemas/job/artifact.json
+++ b/spec/fixtures/api/schemas/job/artifact.json
@@ -5,7 +5,8 @@
"browse_path": { "type": "string"},
"keep_path": { "type": "string"},
"expired": { "type": "boolean" },
- "expire_at": { "type": "string", "format": "date-time" }
+ "expire_at": { "type": "string", "format": "date-time" },
+ "locked": { "type": "boolean" }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/api/schemas/release/link.json b/spec/fixtures/api/schemas/release/link.json
index b3aebfa131e..669f0a39343 100644
--- a/spec/fixtures/api/schemas/release/link.json
+++ b/spec/fixtures/api/schemas/release/link.json
@@ -4,7 +4,6 @@
"properties": {
"id": { "type": "integer" },
"name": { "type": "string" },
- "filepath": { "type": "string" },
"url": { "type": "string" },
"direct_asset_url": { "type": "string" },
"external": { "type": "boolean" },
diff --git a/spec/fixtures/gitlab/database/structure_example.sql b/spec/fixtures/gitlab/database/structure_example.sql
index 06db67b725a..1ad78adea53 100644
--- a/spec/fixtures/gitlab/database/structure_example.sql
+++ b/spec/fixtures/gitlab/database/structure_example.sql
@@ -77,19 +77,3 @@ ALTER TABLE ONLY public.abuse_reports
CREATE INDEX index_abuse_reports_on_user_id ON public.abuse_reports USING btree (user_id);
-
-
-INSERT INTO "schema_migrations" (version) VALUES
-('20200305121159'),
-('20200306095654'),
-('20200306160521'),
-('20200306170211'),
-('20200306170321'),
-('20200306170531'),
-('20200309140540'),
-('20200309195209'),
-('20200309195710'),
-('20200310132654'),
-('20200310135823');
-
-
diff --git a/spec/fixtures/gitlab/database/structure_example_cleaned.sql b/spec/fixtures/gitlab/database/structure_example_cleaned.sql
index 5618fb694a0..42eed974e64 100644
--- a/spec/fixtures/gitlab/database/structure_example_cleaned.sql
+++ b/spec/fixtures/gitlab/database/structure_example_cleaned.sql
@@ -27,16 +27,6 @@ ALTER TABLE ONLY public.abuse_reports
CREATE INDEX index_abuse_reports_on_user_id ON public.abuse_reports USING btree (user_id);
-INSERT INTO "schema_migrations" (version) VALUES
-('20200305121159'),
-('20200306095654'),
-('20200306160521'),
-('20200306170211'),
-('20200306170321'),
-('20200306170531'),
-('20200309140540'),
-('20200309195209'),
-('20200309195710'),
-('20200310132654'),
-('20200310135823');
-
+-- schema_migrations.version information is no longer stored in this file,
+-- but instead tracked in the db/schema_migrations directory
+-- see https://gitlab.com/gitlab-org/gitlab/-/issues/218590 for details
diff --git a/spec/fixtures/helm/helm_list_v2_empty_blob.json.gz b/spec/fixtures/helm/helm_list_v2_empty_blob.json.gz
new file mode 100644
index 00000000000..5647f052c3b
--- /dev/null
+++ b/spec/fixtures/helm/helm_list_v2_empty_blob.json.gz
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml
new file mode 100644
index 00000000000..746a90f266e
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml
@@ -0,0 +1,33 @@
+dashboard: 'Test Dashboard'
+panel_groups:
+- panels:
+ - title: "Super Chart A1"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ max_value: 1
+ metrics:
+ - id: metric_a1
+ query_range: |+
+ avg(
+ sum(
+ container_memory_usage_bytes{
+ container_name!="POD",
+ pod_name=~"^{{ci_environment_slug}}-(.*)",
+ namespace="{{kube_namespace}}"
+ }
+ ) by (job)
+ ) without (job)
+ /1024/1024/1024
+ unit: unit
+ label: Legend Label
+ - title: "Super Chart A2"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 2
+ metrics:
+ - id: metric_a2
+ query_range: 'query'
+ label: Legend Label
+ unit: unit
+- group: Group B
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_is_an_array.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_is_an_array.yml
new file mode 100644
index 00000000000..7627592553e
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_is_an_array.yml
@@ -0,0 +1,15 @@
+---
+- dashboard: 'Test Dashboard'
+ panel_groups:
+ - group: Group A
+ panels:
+ - title: "Super Chart A2"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 2
+ metrics:
+ - id: metric_a2
+ query_range: 'query'
+ label: Legend Label
+ unit: unit
+- dashboard: 'second entry'
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml
new file mode 100644
index 00000000000..6f9e22c3212
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml
@@ -0,0 +1,32 @@
+dashboard: 'Test Dashboard'
+priority: 1
+links:
+ - title: Link 1
+ url: https://gitlab.com
+ - title: Link 2
+ url: https://docs.gitlab.com
+templating:
+ variables:
+ text_variable_full_syntax:
+ label: 'Variable 1'
+ type: text
+ options:
+ default_value: 'default'
+ text_variable_simple_syntax: 'default value'
+ custom_variable_simple_syntax: ['value1', 'value2', 'value3']
+ custom_variable_full_syntax:
+ label: 'Variable 2'
+ type: custom
+ options:
+ values:
+ - value: 'value option 1'
+ text: 'Option 1'
+ - value: 'value_option_2'
+ text: 'Option 2'
+ default: true
+ metric_label_values_variable:
+ label: 'Variable 3'
+ type: metric_label_values
+ options:
+ series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
+ label: 'backend'
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml
new file mode 100644
index 00000000000..8f12365dca2
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml
@@ -0,0 +1,15 @@
+dashboard: 'Test Dashboard'
+panel_groups:
+- group: Group A
+ priority: 1
+ panels:
+ - title: "Super Chart A1"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ max_value: 1
+ - title: "Super Chart A2"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 2
+ metrics:
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panle_groups_wrong_content_type.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panle_groups_wrong_content_type.yml
new file mode 100644
index 00000000000..104107fa96e
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/dashboard_panle_groups_wrong_content_type.yml
@@ -0,0 +1,33 @@
+dashboard: 'Test Dashboard'
+priority: 1
+links:
+- title: Link 1
+ url: https://gitlab.com
+- title: Link 2
+ url: https://docs.gitlab.com
+templating:
+ variables:
+ text_variable_full_syntax:
+ label: 'Variable 1'
+ type: text
+ options:
+ default_value: 'default'
+ text_variable_simple_syntax: 'default value'
+ custom_variable_simple_syntax: ['value1', 'value2', 'value3']
+ custom_variable_full_syntax:
+ label: 'Variable 2'
+ type: custom
+ options:
+ values:
+ - value: 'value option 1'
+ text: 'Option 1'
+ - value: 'value_option_2'
+ text: 'Option 2'
+ default: true
+ metric_label_values_variable:
+ label: 'Variable 3'
+ type: metric_label_values
+ options:
+ series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
+ label: 'backend'
+panel_groups: this should be an array
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml
new file mode 100644
index 00000000000..09a87703bfa
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml
@@ -0,0 +1,67 @@
+dashboard: 'Test Dashboard'
+priority: 1
+links:
+- title: Link 1
+ url: https://gitlab.com
+- title: Link 2
+ url: https://docs.gitlab.com
+templating:
+ variables:
+ text_variable_full_syntax:
+ label: 'Variable 1'
+ type: text
+ options:
+ default_value: 'default'
+ text_variable_simple_syntax: 'default value'
+ custom_variable_simple_syntax: ['value1', 'value2', 'value3']
+ custom_variable_full_syntax:
+ label: 'Variable 2'
+ type: custom
+ options:
+ values:
+ - value: 'value option 1'
+ text: 'Option 1'
+ - value: 'value_option_2'
+ text: 'Option 2'
+ default: true
+ metric_label_values_variable:
+ label: 'Variable 3'
+ type: metric_label_values
+ options:
+ series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
+ label: 'backend'
+panel_groups:
+- group: Group A
+ priority: 1
+ panels:
+ - title: "Super Chart A1"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ max_value: 1
+ metrics:
+ - id: metric_a1
+ query_range: 'query'
+ unit: unit
+ label: Legend Label
+ - title: "Super Chart A2"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 2
+ metrics:
+ - id: metric_a1
+ query_range: 'query'
+ label: Legend Label
+ unit: unit
+- group: Group B
+ priority: 10
+ panels:
+ - title: "Super Chart B"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ metrics:
+ - id: metric_a1
+ query_range: 'query'
+ unit: unit
+ label: Legend Label
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/invalid_dashboard.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/invalid_dashboard.yml
new file mode 100644
index 00000000000..312053d2770
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/invalid_dashboard.yml
@@ -0,0 +1,67 @@
+dashboard: 'Test Dashboard'
+priority: 1
+links:
+- title: Link 1
+ url: https://gitlab.com
+- title: Link 2
+ url: https://docs.gitlab.com
+templating:
+ variables:
+ text_variable_full_syntax:
+ label: 'Variable 1'
+ type: text
+ options:
+ default_value: 'default'
+ text_variable_simple_syntax: 'default value'
+ custom_variable_simple_syntax: ['value1', 'value2', 'value3']
+ custom_variable_full_syntax:
+ label: 'Variable 2'
+ type: custom
+ options:
+ values:
+ - value: 'value option 1'
+ text: 'Option 1'
+ - value: 'value_option_2'
+ text: 'Option 2'
+ default: true
+ metric_label_values_variable:
+ label: 'Variable 3'
+ type: metric_label_values
+ options:
+ series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
+ label: 'backend'
+panel_groups:
+- group: Group A
+ priority: 1
+ panels:
+ - title: "Super Chart A1"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: this_should_be_a_int
+ max_value: 1
+ metrics:
+ - id: metric_a1
+ query_range: 'query'
+ unit: unit
+ label: Legend Label
+ - title: "Super Chart A2"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 2
+ metrics:
+ - id: metric_a2
+ query_range: 'query'
+ label: Legend Label
+ unit: unit
+- group: Group B
+ priority: 10
+ panels:
+ - title: "Super Chart B"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ metrics:
+ - id: metric_b
+ query_range: 'query'
+ unit: unit
+ label: Legend Label
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/sample_dashboard.yml b/spec/fixtures/lib/gitlab/metrics/dashboard/sample_dashboard.yml
index 1e41ef669d1..17b9552763a 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/sample_dashboard.yml
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/sample_dashboard.yml
@@ -31,37 +31,42 @@ templating:
series_selector: 'backend:haproxy_backend_availability:ratio{env="{{env}}"}'
label: 'backend'
panel_groups:
-- group: Group A
- priority: 1
+- group: Group B
panels:
- - title: "Super Chart A1"
+ - title: "Super Chart B"
type: "area-chart"
y_label: "y_label"
- weight: 1
- max_value: 1
metrics:
- - id: metric_a1
+ - id: metric_b
query_range: 'query'
unit: unit
label: Legend Label
+- group: Group A
+ panels:
- title: "Super Chart A2"
type: "area-chart"
y_label: "y_label"
- weight: 2
metrics:
- id: metric_a2
- query_range: 'query'
+ query_range: 2000
label: Legend Label
unit: unit
-- group: Group B
- priority: 10
- panels:
- - title: "Super Chart B"
+ - title: "Super Chart A1"
type: "area-chart"
y_label: "y_label"
- weight: 1
+ max_value: 1
metrics:
- - id: metric_b
- query_range: 'query'
+ - id: metric_a1
+ query_range: |+
+ avg(
+ sum(
+ container_memory_usage_bytes{
+ container_name!="POD",
+ pod_name=~"^{{ci_environment_slug}}-(.*)",
+ namespace="{{kube_namespace}}"
+ }
+ ) by (job)
+ ) without (job)
+ /1024/1024/1024
unit: unit
label: Legend Label
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
index 038f5ac5d4e..b23b0ea15d2 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/metrics.json
@@ -10,8 +10,8 @@
],
"properties": {
"id": { "type": "string" },
- "query_range": { "type": "string" },
- "query": { "type": "string" },
+ "query_range": { "type": ["string", "number"] },
+ "query": { "type": ["string", "number"] },
"unit": { "type": "string" },
"label": { "type": "string" },
"track": { "type": "string" },
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
index d16fcd40359..71c0981d9ec 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panel_groups.json
@@ -6,7 +6,6 @@
],
"properties": {
"group": { "type": "string" },
- "priority": { "type": "number" },
"panels": {
"type": "array",
"items": { "$ref": "panels.json" }
diff --git a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
index 20595cc0d73..b4809a85101 100644
--- a/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
+++ b/spec/fixtures/lib/gitlab/metrics/dashboard/schemas/panels.json
@@ -12,7 +12,6 @@
"y_label": { "type": "string" },
"y_axis": { "$ref": "axis.json" },
"max_value": { "type": "number" },
- "weight": { "type": "number" },
"metrics": {
"type": "array",
"items": { "$ref": "metrics.json" }
diff --git a/spec/fixtures/pipeline_artifacts/code_coverage.json b/spec/fixtures/pipeline_artifacts/code_coverage.json
new file mode 100644
index 00000000000..e9c2cc0d1bf
--- /dev/null
+++ b/spec/fixtures/pipeline_artifacts/code_coverage.json
@@ -0,0 +1,9 @@
+{
+ "files": {
+ "demo.rb": {
+ "1": 1,
+ "2": 1,
+ "3": 1
+ }
+ }
+}
diff --git a/spec/fixtures/potential_html.po b/spec/fixtures/potential_html.po
new file mode 100644
index 00000000000..83ea2c13cef
--- /dev/null
+++ b/spec/fixtures/potential_html.po
@@ -0,0 +1,28 @@
+# Spanish translations for gitlab package.
+# Copyright (C) 2017 THE PACKAGE'S COPYRIGHT HOLDER
+# This file is distributed under the same license as the gitlab package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2017.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: gitlab 1.0.0\n"
+"Report-Msgid-Bugs-To: \n"
+"PO-Revision-Date: 2017-07-13 12:10-0500\n"
+"Language-Team: Spanish\n"
+"Language: es\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=n != 1;\n"
+"Last-Translator: Translator <test@example.com>\n"
+"X-Generator: Poedit 2.0.2\n"
+
+msgid "String with some <strong>emphasis</strong>"
+msgid_plural "String with lots of <strong>emphasis</strong>"
+msgstr[0] "Translated string with some <strong>emphasis</strong>"
+msgstr[1] "Translated string with lots of <strong>emphasis</strong>"
+
+msgid "String with a legitimate < use"
+msgid_plural "String with lots of < > uses"
+msgstr[0] "Translated string with a legitimate < use"
+msgstr[1] "Translated string with lots of < > uses"
diff --git a/spec/fixtures/product_analytics/event.json b/spec/fixtures/product_analytics/event.json
index 3100b068a0c..1a4db860be2 100644
--- a/spec/fixtures/product_analytics/event.json
+++ b/spec/fixtures/product_analytics/event.json
@@ -12,5 +12,10 @@
"lang":"en-US",
"cookie":"1",
"tz":"America/Los_Angeles",
- "cs":"UTF-8"
+ "cs":"UTF-8",
+ "se_ca": "category",
+ "se_ac": "action",
+ "se_la": "label",
+ "se_pr": "property",
+ "se_va": 12.34
}
diff --git a/spec/fixtures/valid.po b/spec/fixtures/valid.po
index bb2dfa419bb..f1d62296709 100644
--- a/spec/fixtures/valid.po
+++ b/spec/fixtures/valid.po
@@ -73,9 +73,6 @@ msgid_plural "Branches"
msgstr[0] "Rama"
msgstr[1] "Ramas"
-msgid "Branch <strong>%{branch_name}</strong> was created. To set up auto deploy, choose a GitLab CI Yaml template and commit your changes. %{link_to_autodeploy_doc}"
-msgstr "La rama <strong>%{branch_name}</strong> fue creada. Para configurar el auto despliegue, escoge una plantilla Yaml para GitLab CI y envía tus cambios. %{link_to_autodeploy_doc}"
-
msgid "BranchSwitcherPlaceholder|Search branches"
msgstr "Buscar ramas"
diff --git a/spec/frontend/__mocks__/@toast-ui/vue-editor/index.js b/spec/frontend/__mocks__/@toast-ui/vue-editor/index.js
index 726ed0fa030..9fee8e18d26 100644
--- a/spec/frontend/__mocks__/@toast-ui/vue-editor/index.js
+++ b/spec/frontend/__mocks__/@toast-ui/vue-editor/index.js
@@ -17,6 +17,17 @@ export const Editor = {
type: String,
},
},
+ created() {
+ const mockEditorApi = {
+ eventManager: {
+ addEventType: jest.fn(),
+ listen: jest.fn(),
+ removeEventHandler: jest.fn(),
+ },
+ };
+
+ this.$emit('load', mockEditorApi);
+ },
render(h) {
return h('div');
},
diff --git a/spec/frontend/__mocks__/monaco-editor/index.js b/spec/frontend/__mocks__/monaco-editor/index.js
index b9602d69b74..18b7df32f9b 100644
--- a/spec/frontend/__mocks__/monaco-editor/index.js
+++ b/spec/frontend/__mocks__/monaco-editor/index.js
@@ -8,11 +8,11 @@ import 'monaco-editor/esm/vs/language/css/monaco.contribution';
import 'monaco-editor/esm/vs/language/json/monaco.contribution';
import 'monaco-editor/esm/vs/language/html/monaco.contribution';
import 'monaco-editor/esm/vs/basic-languages/monaco.contribution';
-import 'monaco-yaml/esm/monaco.contribution';
+import 'monaco-yaml/lib/esm/monaco.contribution';
// This language starts trying to spin up web workers which obviously breaks in Jest environment
jest.mock('monaco-editor/esm/vs/language/typescript/tsMode');
-jest.mock('monaco-yaml/esm/yamlMode');
+jest.mock('monaco-yaml/lib/esm/yamlMode');
export * from 'monaco-editor/esm/vs/editor/editor.api';
export default global.monaco;
diff --git a/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
new file mode 100644
index 00000000000..5fad0d07f97
--- /dev/null
+++ b/spec/frontend/add_context_commits_modal/components/__snapshots__/add_context_commits_modal_spec.js.snap
@@ -0,0 +1,50 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`AddContextCommitsModal renders modal with 2 tabs 1`] = `
+<gl-modal-stub
+ body-class="add-review-item pt-0"
+ cancel-variant="light"
+ modalclass=""
+ modalid="add-review-item"
+ ok-disabled="true"
+ ok-title="Save changes"
+ scrollable="true"
+ size="md"
+ title="Add or remove previously merged commits"
+ titletag="h4"
+>
+ <gl-tabs-stub
+ contentclass="pt-0"
+ theme="indigo"
+ value="0"
+ >
+ <gl-tab-stub>
+
+ <div
+ class="mt-2"
+ >
+ <gl-search-box-by-type-stub
+ clearbuttontitle="Clear"
+ placeholder="Search by commit title or SHA"
+ value=""
+ />
+
+ <review-tab-container-stub
+ commits=""
+ emptylisttext="Your search didn't match any commits. Try a different query."
+ loadingfailedtext="Unable to load commits. Try again later."
+ />
+ </div>
+ </gl-tab-stub>
+
+ <gl-tab-stub>
+
+ <review-tab-container-stub
+ commits=""
+ emptylisttext="Commits you select appear here. Go to the first tab and select commits to add to this merge request."
+ loadingfailedtext="Unable to load commits. Try again later."
+ />
+ </gl-tab-stub>
+ </gl-tabs-stub>
+</gl-modal-stub>
+`;
diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
new file mode 100644
index 00000000000..6904e34db5d
--- /dev/null
+++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
@@ -0,0 +1,174 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlModal, GlSearchBoxByType } from '@gitlab/ui';
+import AddReviewItemsModal from '~/add_context_commits_modal/components/add_context_commits_modal_wrapper.vue';
+import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
+
+import defaultState from '~/add_context_commits_modal/store/state';
+import mutations from '~/add_context_commits_modal/store/mutations';
+import * as actions from '~/add_context_commits_modal/store/actions';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('AddContextCommitsModal', () => {
+ let wrapper;
+ let store;
+ const createContextCommits = jest.fn();
+ const removeContextCommits = jest.fn();
+ const resetModalState = jest.fn();
+ const searchCommits = jest.fn();
+ const { commit } = getDiffWithCommit();
+
+ const createWrapper = (props = {}) => {
+ store = new Vuex.Store({
+ mutations,
+ state: {
+ ...defaultState(),
+ },
+ actions: {
+ ...actions,
+ searchCommits,
+ createContextCommits,
+ removeContextCommits,
+ resetModalState,
+ },
+ });
+
+ wrapper = shallowMount(AddReviewItemsModal, {
+ localVue,
+ store,
+ propsData: {
+ contextCommitsPath: '',
+ targetBranch: 'master',
+ mergeRequestIid: 1,
+ projectId: 1,
+ ...props,
+ },
+ });
+ return wrapper;
+ };
+
+ const findModal = () => wrapper.find(GlModal);
+ const findSearch = () => wrapper.find(GlSearchBoxByType);
+
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders modal with 2 tabs', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+  it('renders an ok button labeled "Save changes"', () => {
+ expect(findModal().attributes('ok-title')).toEqual('Save changes');
+ });
+
+ describe('when in first tab, renders a modal with', () => {
+ it('renders the search box component', () => {
+ expect(findSearch().exists()).toBe(true);
+ });
+
+    it('when the user starts entering text in the search box, it calls action "searchCommits" after waiting for 500ms', () => {
+ const searchText = 'abcd';
+ findSearch().vm.$emit('input', searchText);
+ expect(searchCommits).not.toBeCalled();
+ jest.advanceTimersByTime(500);
+ expect(searchCommits).toHaveBeenCalledWith(expect.anything(), searchText, undefined);
+ });
+
+ it('disabled ok button when no row is selected', () => {
+ expect(findModal().attributes('ok-disabled')).toBe('true');
+ });
+
+    it('enabled ok button when at least one row is selected', () => {
+ wrapper.vm.$store.state.selectedCommits = [{ ...commit, isSelected: true }];
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findModal().attributes('ok-disabled')).toBeFalsy();
+ });
+ });
+ });
+
+ describe('when in second tab, renders a modal with', () => {
+ beforeEach(() => {
+ wrapper.vm.$store.state.tabIndex = 1;
+ });
+ it('a disabled ok button when no row is selected', () => {
+ expect(findModal().attributes('ok-disabled')).toBe('true');
+ });
+
+    it('an enabled ok button when at least one row is selected', () => {
+ wrapper.vm.$store.state.selectedCommits = [{ ...commit, isSelected: true }];
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findModal().attributes('ok-disabled')).toBeFalsy();
+ });
+ });
+
+ it('a disabled ok button in first tab, when row is selected in second tab', () => {
+ createWrapper({ selectedContextCommits: [commit] });
+ expect(wrapper.find(GlModal).attributes('ok-disabled')).toBe('true');
+ });
+ });
+
+  describe('when the ok button is clicked, calls action', () => {
+ it('"createContextCommits" when only new commits to be added ', () => {
+ wrapper.vm.$store.state.selectedCommits = [{ ...commit, isSelected: true }];
+ findModal().vm.$emit('ok');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(createContextCommits).toHaveBeenCalledWith(
+ expect.anything(),
+ { commits: [{ ...commit, isSelected: true }], forceReload: true },
+ undefined,
+ );
+ });
+ });
+ it('"removeContextCommits" when only added commits are to be removed ', () => {
+ wrapper.vm.$store.state.toRemoveCommits = [commit.short_id];
+ findModal().vm.$emit('ok');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(removeContextCommits).toHaveBeenCalledWith(expect.anything(), true, undefined);
+ });
+ });
+ it('"createContextCommits" and "removeContextCommits" when new commits are to be added and old commits are to be removed', () => {
+ wrapper.vm.$store.state.selectedCommits = [{ ...commit, isSelected: true }];
+ wrapper.vm.$store.state.toRemoveCommits = [commit.short_id];
+ findModal().vm.$emit('ok');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(createContextCommits).toHaveBeenCalledWith(
+ expect.anything(),
+ { commits: [{ ...commit, isSelected: true }] },
+ undefined,
+ );
+ expect(removeContextCommits).toHaveBeenCalledWith(expect.anything(), undefined, undefined);
+ });
+ });
+ });
+
+  describe('when the cancel button is clicked', () => {
+ it('does not call "createContextCommits" or "removeContextCommits"', () => {
+ findModal().vm.$emit('cancel');
+ expect(createContextCommits).not.toHaveBeenCalled();
+ expect(removeContextCommits).not.toHaveBeenCalled();
+ });
+ it('"resetModalState" to reset all the modal state', () => {
+ findModal().vm.$emit('cancel');
+ expect(resetModalState).toHaveBeenCalledWith(expect.anything(), undefined, undefined);
+ });
+ });
+
+  describe('when the modal is closed by clicking the "X" button or by pressing "ESC" key', () => {
+ it('does not call "createContextCommits" or "removeContextCommits"', () => {
+ findModal().vm.$emit('close');
+ expect(createContextCommits).not.toHaveBeenCalled();
+ expect(removeContextCommits).not.toHaveBeenCalled();
+ });
+ it('"resetModalState" to reset all the modal state', () => {
+ findModal().vm.$emit('close');
+ expect(resetModalState).toHaveBeenCalledWith(expect.anything(), undefined, undefined);
+ });
+ });
+});
diff --git a/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js b/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js
new file mode 100644
index 00000000000..4e65713a680
--- /dev/null
+++ b/spec/frontend/add_context_commits_modal/components/review_tab_container_spec.js
@@ -0,0 +1,51 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
+import ReviewTabContainer from '~/add_context_commits_modal/components/review_tab_container.vue';
+import CommitItem from '~/diffs/components/commit_item.vue';
+import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
+
+describe('ReviewTabContainer', () => {
+ let wrapper;
+ const { commit } = getDiffWithCommit();
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMount(ReviewTabContainer, {
+ propsData: {
+ tab: 'commits',
+ isLoading: false,
+ loadingError: false,
+ loadingFailedText: 'Failed to load commits',
+ commits: [],
+ selectedRow: [],
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows loading icon when commits are being loaded', () => {
+ createWrapper({ isLoading: true });
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('shows loading error text when API call fails', () => {
+ createWrapper({ loadingError: true });
+ expect(wrapper.text()).toContain('Failed to load commits');
+ });
+
+ it('shows "No commits present here" when commits are not present', () => {
+ expect(wrapper.text()).toContain('No commits present here');
+ });
+
+  it('renders all passed commits as a list', () => {
+ createWrapper({ commits: [commit] });
+ expect(wrapper.findAll(CommitItem).length).toBe(1);
+ });
+});
diff --git a/spec/frontend/add_context_commits_modal/store/actions_spec.js b/spec/frontend/add_context_commits_modal/store/actions_spec.js
new file mode 100644
index 00000000000..24948dd6073
--- /dev/null
+++ b/spec/frontend/add_context_commits_modal/store/actions_spec.js
@@ -0,0 +1,239 @@
+import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
+import axios from '~/lib/utils/axios_utils';
+import {
+ setBaseConfig,
+ setTabIndex,
+ setCommits,
+ createContextCommits,
+ fetchContextCommits,
+ setContextCommits,
+ removeContextCommits,
+ setSelectedCommits,
+ setSearchText,
+ setToRemoveCommits,
+ resetModalState,
+} from '~/add_context_commits_modal/store/actions';
+import * as types from '~/add_context_commits_modal/store/mutation_types';
+import testAction from '../../helpers/vuex_action_helper';
+
+describe('AddContextCommitsModalStoreActions', () => {
+ const contextCommitEndpoint =
+ '/api/v4/projects/gitlab-org%2fgitlab/merge_requests/1/context_commits';
+ const mergeRequestIid = 1;
+ const projectId = 1;
+ const projectPath = 'gitlab-org/gitlab';
+ const contextCommitsPath = `${TEST_HOST}/gitlab-org/gitlab/-/merge_requests/1/context_commits.json`;
+ const dummyCommit = {
+ id: 1,
+ title: 'dummy commit',
+ short_id: 'abcdef',
+ committed_date: '2020-06-12',
+ };
+ gon.api_version = 'v4';
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('setBaseConfig', () => {
+ it('commits SET_BASE_CONFIG', done => {
+ const options = { contextCommitsPath, mergeRequestIid, projectId };
+ testAction(
+ setBaseConfig,
+ options,
+ {
+ contextCommitsPath: '',
+ mergeRequestIid,
+ projectId,
+ },
+ [
+ {
+ type: types.SET_BASE_CONFIG,
+ payload: options,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setTabIndex', () => {
+ it('commits SET_TABINDEX', done => {
+ testAction(
+ setTabIndex,
+ { tabIndex: 1 },
+ { tabIndex: 0 },
+ [{ type: types.SET_TABINDEX, payload: { tabIndex: 1 } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setCommits', () => {
+ it('commits SET_COMMITS', done => {
+ testAction(
+ setCommits,
+ { commits: [], silentAddition: false },
+ { isLoadingCommits: false, commits: [] },
+ [{ type: types.SET_COMMITS, payload: [] }],
+ [],
+ done,
+ );
+ });
+
+ it('commits SET_COMMITS_SILENT', done => {
+ testAction(
+ setCommits,
+ { commits: [], silentAddition: true },
+ { isLoadingCommits: true, commits: [] },
+ [{ type: types.SET_COMMITS_SILENT, payload: [] }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('createContextCommits', () => {
+ it('calls API to create context commits', done => {
+ mock.onPost(contextCommitEndpoint).reply(200, {});
+
+ testAction(createContextCommits, { commits: [] }, {}, [], [], done);
+
+ createContextCommits(
+ { state: { projectId, mergeRequestIid }, commit: () => null },
+ { commits: [] },
+ )
+ .then(() => {
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('fetchContextCommits', () => {
+ beforeEach(() => {
+ mock
+ .onGet(
+ `/api/${gon.api_version}/projects/gitlab-org%2Fgitlab/merge_requests/1/context_commits`,
+ )
+ .reply(200, [dummyCommit]);
+ });
+ it('commits FETCH_CONTEXT_COMMITS', done => {
+ const contextCommit = { ...dummyCommit, isSelected: true };
+ testAction(
+ fetchContextCommits,
+ null,
+ {
+ mergeRequestIid,
+ projectId: projectPath,
+ isLoadingContextCommits: false,
+ contextCommitsLoadingError: false,
+ commits: [],
+ },
+ [{ type: types.FETCH_CONTEXT_COMMITS }],
+ [
+ { type: 'setContextCommits', payload: [contextCommit] },
+ { type: 'setCommits', payload: { commits: [contextCommit], silentAddition: true } },
+ { type: 'setSelectedCommits', payload: [contextCommit] },
+ ],
+ done,
+ );
+ });
+ });
+
+ describe('setContextCommits', () => {
+ it('commits SET_CONTEXT_COMMITS', done => {
+ testAction(
+ setContextCommits,
+ { data: [] },
+ { contextCommits: [], isLoadingContextCommits: false },
+ [{ type: types.SET_CONTEXT_COMMITS, payload: { data: [] } }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('removeContextCommits', () => {
+ beforeEach(() => {
+ mock
+ .onDelete('/api/v4/projects/gitlab-org%2Fgitlab/merge_requests/1/context_commits')
+ .reply(204);
+ });
+ it('calls API to remove context commits', done => {
+ testAction(
+ removeContextCommits,
+ { forceReload: false },
+ { mergeRequestIid, projectId, toRemoveCommits: [] },
+ [],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setSelectedCommits', () => {
+ it('commits SET_SELECTED_COMMITS', done => {
+ testAction(
+ setSelectedCommits,
+ [dummyCommit],
+ { selectedCommits: [] },
+ [{ type: types.SET_SELECTED_COMMITS, payload: [dummyCommit] }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setSearchText', () => {
+ it('commits SET_SEARCH_TEXT', done => {
+ const searchText = 'Dummy Text';
+ testAction(
+ setSearchText,
+ searchText,
+ { searchText: '' },
+ [{ type: types.SET_SEARCH_TEXT, payload: searchText }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setToRemoveCommits', () => {
+ it('commits SET_TO_REMOVE_COMMITS', done => {
+ const commitId = 'abcde';
+
+ testAction(
+ setToRemoveCommits,
+ [commitId],
+ { toRemoveCommits: [] },
+ [{ type: types.SET_TO_REMOVE_COMMITS, payload: [commitId] }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('resetModalState', () => {
+ it('commits RESET_MODAL_STATE', done => {
+ const commitId = 'abcde';
+
+ testAction(
+ resetModalState,
+ null,
+ { toRemoveCommits: [commitId] },
+ [{ type: types.RESET_MODAL_STATE }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/add_context_commits_modal/store/mutations_spec.js b/spec/frontend/add_context_commits_modal/store/mutations_spec.js
new file mode 100644
index 00000000000..22f82570ab1
--- /dev/null
+++ b/spec/frontend/add_context_commits_modal/store/mutations_spec.js
@@ -0,0 +1,156 @@
+import { TEST_HOST } from 'helpers/test_constants';
+import mutations from '~/add_context_commits_modal/store/mutations';
+import * as types from '~/add_context_commits_modal/store/mutation_types';
+import getDiffWithCommit from '../../diffs/mock_data/diff_with_commit';
+
+describe('AddContextCommitsModalStoreMutations', () => {
+ const { commit } = getDiffWithCommit();
+ describe('SET_BASE_CONFIG', () => {
+ it('should set contextCommitsPath, mergeRequestIid and projectId', () => {
+ const state = {};
+ const contextCommitsPath = `${TEST_HOST}/gitlab-org/gitlab/-/merge_requests/1/context_commits.json`;
+ const mergeRequestIid = 1;
+ const projectId = 1;
+
+ mutations[types.SET_BASE_CONFIG](state, { contextCommitsPath, mergeRequestIid, projectId });
+
+ expect(state.contextCommitsPath).toEqual(contextCommitsPath);
+ expect(state.mergeRequestIid).toEqual(mergeRequestIid);
+ expect(state.projectId).toEqual(projectId);
+ });
+ });
+
+ describe('SET_TABINDEX', () => {
+ it('sets tabIndex to specific index', () => {
+ const state = { tabIndex: 0 };
+
+ mutations[types.SET_TABINDEX](state, 1);
+
+ expect(state.tabIndex).toBe(1);
+ });
+ });
+
+ describe('FETCH_COMMITS', () => {
+ it('sets isLoadingCommits to true', () => {
+ const state = { isLoadingCommits: false };
+
+ mutations[types.FETCH_COMMITS](state);
+
+ expect(state.isLoadingCommits).toBe(true);
+ });
+ });
+
+ describe('SET_COMMITS', () => {
+    it('sets commits to passed data and stops loading', () => {
+ const state = { commits: [], isLoadingCommits: true };
+
+ mutations[types.SET_COMMITS](state, [commit]);
+
+ expect(state.commits).toStrictEqual([commit]);
+ expect(state.isLoadingCommits).toBe(false);
+ });
+ });
+
+ describe('SET_COMMITS_SILENT', () => {
+ it('sets commits to passed data and loading continues', () => {
+ const state = { commits: [], isLoadingCommits: true };
+
+ mutations[types.SET_COMMITS_SILENT](state, [commit]);
+
+ expect(state.commits).toStrictEqual([commit]);
+ expect(state.isLoadingCommits).toBe(true);
+ });
+ });
+
+ describe('FETCH_COMMITS_ERROR', () => {
+ it('sets commitsLoadingError to true', () => {
+ const state = { commitsLoadingError: false };
+
+ mutations[types.FETCH_COMMITS_ERROR](state);
+
+ expect(state.commitsLoadingError).toBe(true);
+ });
+ });
+
+ describe('FETCH_CONTEXT_COMMITS', () => {
+ it('sets isLoadingContextCommits to true', () => {
+ const state = { isLoadingContextCommits: false };
+
+ mutations[types.FETCH_CONTEXT_COMMITS](state);
+
+ expect(state.isLoadingContextCommits).toBe(true);
+ });
+ });
+
+ describe('SET_CONTEXT_COMMITS', () => {
+    it('sets contextCommits to passed data and stops loading', () => {
+ const state = { contextCommits: [], isLoadingContextCommits: true };
+
+ mutations[types.SET_CONTEXT_COMMITS](state, [commit]);
+
+ expect(state.contextCommits).toStrictEqual([commit]);
+ expect(state.isLoadingContextCommits).toBe(false);
+ });
+ });
+
+ describe('FETCH_CONTEXT_COMMITS_ERROR', () => {
+ it('sets contextCommitsLoadingError to true', () => {
+ const state = { contextCommitsLoadingError: false };
+
+ mutations[types.FETCH_CONTEXT_COMMITS_ERROR](state);
+
+ expect(state.contextCommitsLoadingError).toBe(true);
+ });
+ });
+
+ describe('SET_SELECTED_COMMITS', () => {
+ it('sets selectedCommits to specified value', () => {
+ const state = { selectedCommits: [] };
+
+ mutations[types.SET_SELECTED_COMMITS](state, [commit]);
+
+ expect(state.selectedCommits).toStrictEqual([commit]);
+ });
+ });
+
+ describe('SET_SEARCH_TEXT', () => {
+ it('sets searchText to specified value', () => {
+ const searchText = 'Test';
+ const state = { searchText: '' };
+
+ mutations[types.SET_SEARCH_TEXT](state, searchText);
+
+ expect(state.searchText).toBe(searchText);
+ });
+ });
+
+ describe('SET_TO_REMOVE_COMMITS', () => {
+    it('sets toRemoveCommits to specified value', () => {
+ const state = { toRemoveCommits: [] };
+
+ mutations[types.SET_TO_REMOVE_COMMITS](state, [commit.short_id]);
+
+ expect(state.toRemoveCommits).toStrictEqual([commit.short_id]);
+ });
+ });
+
+ describe('RESET_MODAL_STATE', () => {
+    it('resets the modal state to its initial values', () => {
+ const state = {
+ commits: [commit],
+ contextCommits: [commit],
+ selectedCommits: [commit],
+ toRemoveCommits: [commit.short_id],
+ searchText: 'Test',
+ };
+
+ mutations[types.RESET_MODAL_STATE](state);
+
+ expect(state.commits).toStrictEqual([]);
+ expect(state.contextCommits).toStrictEqual([]);
+ expect(state.selectedCommits).toStrictEqual([]);
+ expect(state.toRemoveCommits).toStrictEqual([]);
+ expect(state.searchText).toBe('');
+ });
+ });
+});
diff --git a/spec/frontend/alert_management/components/alert_management_detail_spec.js b/spec/frontend/alert_management/components/alert_details_spec.js
index daa730d3b9f..2c4ed100a56 100644
--- a/spec/frontend/alert_management/components/alert_management_detail_spec.js
+++ b/spec/frontend/alert_management/components/alert_details_spec.js
@@ -20,6 +20,7 @@ describe('AlertDetails', () => {
const projectPath = 'root/alerts';
const projectIssuesPath = 'root/alerts/-/issues';
const projectId = '1';
+ const $router = { replace: jest.fn() };
const findDetailsTable = () => wrapper.find(GlTable);
@@ -44,6 +45,8 @@ describe('AlertDetails', () => {
sidebarStatus: {},
},
},
+ $router,
+ $route: { params: {} },
},
stubs,
});
@@ -60,9 +63,9 @@ describe('AlertDetails', () => {
mock.restore();
});
- const findCreateIssueBtn = () => wrapper.find('[data-testid="createIssueBtn"]');
- const findViewIssueBtn = () => wrapper.find('[data-testid="viewIssueBtn"]');
- const findIssueCreationAlert = () => wrapper.find('[data-testid="issueCreationError"]');
+ const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
+ const findViewIncidentBtn = () => wrapper.find('[data-testid="viewIncidentBtn"]');
+ const findIncidentCreationAlert = () => wrapper.find('[data-testid="incidentCreationError"]');
describe('Alert details', () => {
describe('when alert is null', () => {
@@ -81,11 +84,11 @@ describe('AlertDetails', () => {
});
it('renders a tab with overview information', () => {
- expect(wrapper.find('[data-testid="overviewTab"]').exists()).toBe(true);
+ expect(wrapper.find('[data-testid="overview"]').exists()).toBe(true);
});
it('renders a tab with full alert information', () => {
- expect(wrapper.find('[data-testid="fullDetailsTab"]').exists()).toBe(true);
+ expect(wrapper.find('[data-testid="fullDetails"]').exists()).toBe(true);
});
it('renders severity', () => {
@@ -115,6 +118,8 @@ describe('AlertDetails', () => {
${'monitoringTool'} | ${undefined} | ${false}
${'service'} | ${'Prometheus'} | ${true}
${'service'} | ${undefined} | ${false}
+ ${'runbook'} | ${undefined} | ${false}
+ ${'runbook'} | ${'run.com'} | ${true}
`(`$desc`, ({ field, data, isShown }) => {
beforeEach(() => {
mountComponent({ data: { alert: { ...mockAlert, [field]: data } } });
@@ -130,18 +135,20 @@ describe('AlertDetails', () => {
});
});
- describe('Create issue from alert', () => {
- it('should display "View issue" button that links the issue page when issue exists', () => {
+ describe('Create incident from alert', () => {
+ it('should display "View incident" button that links the incident page when incident exists', () => {
const issueIid = '3';
mountComponent({
data: { alert: { ...mockAlert, issueIid }, sidebarStatus: false },
});
- expect(findViewIssueBtn().exists()).toBe(true);
- expect(findViewIssueBtn().attributes('href')).toBe(joinPaths(projectIssuesPath, issueIid));
- expect(findCreateIssueBtn().exists()).toBe(false);
+ expect(findViewIncidentBtn().exists()).toBe(true);
+ expect(findViewIncidentBtn().attributes('href')).toBe(
+ joinPaths(projectIssuesPath, issueIid),
+ );
+ expect(findCreateIncidentBtn().exists()).toBe(false);
});
- it('should display "Create issue" button when issue doesn\'t exist yet', () => {
+ it('should display "Create incident" button when incident doesn\'t exist yet', () => {
const issueIid = null;
mountComponent({
mountMethod: mount,
@@ -149,8 +156,8 @@ describe('AlertDetails', () => {
});
return wrapper.vm.$nextTick().then(() => {
- expect(findViewIssueBtn().exists()).toBe(false);
- expect(findCreateIssueBtn().exists()).toBe(true);
+ expect(findViewIncidentBtn().exists()).toBe(false);
+ expect(findCreateIncidentBtn().exists()).toBe(true);
});
});
@@ -160,7 +167,7 @@ describe('AlertDetails', () => {
.spyOn(wrapper.vm.$apollo, 'mutate')
.mockResolvedValue({ data: { createAlertIssue: { issue: { iid: issueIid } } } });
- findCreateIssueBtn().trigger('click');
+ findCreateIncidentBtn().trigger('click');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: createIssueMutation,
variables: {
@@ -170,7 +177,7 @@ describe('AlertDetails', () => {
});
});
- it('shows error alert when issue creation fails ', () => {
+ it('shows an error alert when incident creation fails', () => {
const errorMsg = 'Something went wrong';
mountComponent({
mountMethod: mount,
@@ -178,10 +185,10 @@ describe('AlertDetails', () => {
});
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue(errorMsg);
- findCreateIssueBtn().trigger('click');
+ findCreateIncidentBtn().trigger('click');
setImmediate(() => {
- expect(findIssueCreationAlert().text()).toBe(errorMsg);
+ expect(findIncidentCreationAlert().text()).toBe(errorMsg);
});
});
});
@@ -191,7 +198,7 @@ describe('AlertDetails', () => {
mountComponent({ data: { alert: mockAlert } });
});
it('should display a table of raw alert details data', () => {
- wrapper.find('[data-testid="fullDetailsTab"]').trigger('click');
+ wrapper.find('[data-testid="fullDetails"]').trigger('click');
expect(findDetailsTable().exists()).toBe(true);
});
});
@@ -252,6 +259,22 @@ describe('AlertDetails', () => {
);
});
});
+
+ describe('tab navigation', () => {
+ beforeEach(() => {
+ mountComponent({ data: { alert: mockAlert } });
+ });
+
+ it.each`
+ index | tabId
+ ${0} | ${'overview'}
+ ${1} | ${'fullDetails'}
+ ${2} | ${'metrics'}
+ `('will navigate to the correct tab via $tabId', ({ index, tabId }) => {
+ wrapper.setData({ currentTabIndex: index });
+ expect($router.replace).toHaveBeenCalledWith({ name: 'tab', params: { tabId } });
+ });
+ });
});
describe('Snowplow tracking', () => {
diff --git a/spec/frontend/alert_management/components/alert_management_empty_state_spec.js b/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
index 0d1214211d3..6712282503d 100644
--- a/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_empty_state_spec.js
@@ -15,6 +15,7 @@ describe('AlertManagementEmptyState', () => {
wrapper = shallowMount(AlertManagementEmptyState, {
propsData: {
enableAlertManagementPath: '/link',
+ alertsHelpUrl: '/link',
emptyAlertSvgPath: 'illustration/path',
...props,
},
diff --git a/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js b/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js
index 4644406c037..c36107c28ce 100644
--- a/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_list_wrapper_spec.js
@@ -19,6 +19,7 @@ describe('AlertManagementList', () => {
propsData: {
projectPath: 'gitlab-org/gitlab',
enableAlertManagementPath: '/link',
+ alertsHelpUrl: '/link',
populatingAlertsHelpUrl: '/help/help-page.md#populating-alert-data',
emptyAlertSvgPath: 'illustration/path',
...props,
diff --git a/spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js b/spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js
index fe08cf2c10a..2814b5ce357 100644
--- a/spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_sidebar_todo_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
import SidebarTodo from '~/alert_management/components/sidebar/sidebar_todo.vue';
-import AlertMarkTodo from '~/alert_management/graphql/mutations/alert_todo_create.graphql';
+import AlertMarkTodo from '~/alert_management/graphql/mutations/alert_todo_create.mutation.graphql';
import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[0];
@@ -34,6 +34,8 @@ describe('Alert Details Sidebar To Do', () => {
wrapper.destroy();
});
+ const findToDoButton = () => wrapper.find('[data-testid="alert-todo-button"]');
+
describe('updating the alert to do', () => {
const mockUpdatedMutationResult = {
data: {
@@ -44,25 +46,27 @@ describe('Alert Details Sidebar To Do', () => {
},
};
- beforeEach(() => {
- mountComponent({
- data: { alert: mockAlert },
- sidebarCollapsed: false,
- loading: false,
+ describe('adding a todo', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { alert: mockAlert },
+ sidebarCollapsed: false,
+ loading: false,
+ });
});
- });
- it('renders a button for adding a To Do', () => {
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find('[data-testid="alert-todo-button"]').text()).toBe('Add a To Do');
+ it('renders a button for adding a To-Do', async () => {
+ await wrapper.vm.$nextTick();
+
+ expect(findToDoButton().text()).toBe('Add a To-Do');
});
- });
- it('calls `$apollo.mutate` with `AlertMarkTodo` mutation and variables containing `iid`, `todoEvent`, & `projectPath`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationResult);
+ it('calls `$apollo.mutate` with `AlertMarkTodo` mutation and variables containing `iid`, `todoEvent`, & `projectPath`', async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationResult);
+
+ findToDoButton().trigger('click');
+ await wrapper.vm.$nextTick();
- return wrapper.vm.$nextTick().then(() => {
- wrapper.find('button').trigger('click');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: AlertMarkTodo,
variables: {
@@ -72,5 +76,28 @@ describe('Alert Details Sidebar To Do', () => {
});
});
});
+ describe('removing a todo', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { alert: { ...mockAlert, todos: { nodes: [{ id: '1234' }] } } },
+ sidebarCollapsed: false,
+ loading: false,
+ });
+ });
+
+ it('renders a "Mark as done" button when a todo is present', async () => {
+ await wrapper.vm.$nextTick();
+
+ expect(findToDoButton().text()).toBe('Mark as done');
+ });
+
+ it('calls `$apollo.mutate` with `AlertMarkTodoDone` mutation and variables containing `id`', async () => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationResult);
+
+ findToDoButton().trigger('click');
+ await wrapper.vm.$nextTick();
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ });
+ });
});
});
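
The to-do spec above drives the button through the component and spies on this.$apollo.mutate. Below is a small, self-contained sketch of that spying pattern, assuming only Jest and @vue/test-utils; TodoButton and the mutation name are illustrative stand-ins, not the GitLab sidebar component or its GraphQL documents.

import { mount } from '@vue/test-utils';

// Illustrative stand-in component: a button whose click handler calls $apollo.mutate.
const TodoButton = {
  methods: {
    addTodo() {
      return this.$apollo.mutate({ mutation: 'AlertMarkTodo', variables: { iid: '1527542' } });
    },
  },
  render(h) {
    return h(
      'button',
      { attrs: { 'data-testid': 'alert-todo-button' }, on: { click: this.addTodo } },
      'Add a To-Do',
    );
  },
};

describe('to-do button (sketch)', () => {
  it('calls $apollo.mutate with the expected variables on click', () => {
    const mutate = jest.fn().mockResolvedValue({});
    const wrapper = mount(TodoButton, { mocks: { $apollo: { mutate } } });

    wrapper.find('[data-testid="alert-todo-button"]').trigger('click');

    expect(mutate).toHaveBeenCalledWith({
      mutation: 'AlertMarkTodo',
      variables: { iid: '1527542' },
    });
  });
});
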
diff --git a/spec/frontend/alert_management/components/alert_management_table_spec.js b/spec/frontend/alert_management/components/alert_management_table_spec.js
index f316126432e..5dd0d9dc1ba 100644
--- a/spec/frontend/alert_management/components/alert_management_table_spec.js
+++ b/spec/frontend/alert_management/components/alert_management_table_spec.js
@@ -3,8 +3,8 @@ import {
GlTable,
GlAlert,
GlLoadingIcon,
- GlDropdown,
- GlDropdownItem,
+ GlDeprecatedDropdown,
+ GlDeprecatedDropdownItem,
GlIcon,
GlTabs,
GlTab,
@@ -12,6 +12,7 @@ import {
GlPagination,
GlSearchBoxByType,
} from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
import { visitUrl } from '~/lib/utils/url_utility';
import TimeAgo from '~/vue_shared/components/time_ago_tooltip.vue';
import AlertManagementTable from '~/alert_management/components/alert_management_table.vue';
@@ -32,18 +33,19 @@ describe('AlertManagementTable', () => {
const findAlerts = () => wrapper.findAll('table tbody tr');
const findAlert = () => wrapper.find(GlAlert);
const findLoader = () => wrapper.find(GlLoadingIcon);
- const findStatusDropdown = () => wrapper.find(GlDropdown);
+ const findStatusDropdown = () => wrapper.find(GlDeprecatedDropdown);
const findStatusFilterTabs = () => wrapper.findAll(GlTab);
const findStatusTabs = () => wrapper.find(GlTabs);
const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
const findDateFields = () => wrapper.findAll(TimeAgo);
- const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem);
+ const findFirstStatusOption = () => findStatusDropdown().find(GlDeprecatedDropdownItem);
const findAssignees = () => wrapper.findAll('[data-testid="assigneesField"]');
const findSeverityFields = () => wrapper.findAll('[data-testid="severityField"]');
const findSeverityColumnHeader = () => wrapper.findAll('th').at(0);
const findPagination = () => wrapper.find(GlPagination);
const findSearch = () => wrapper.find(GlSearchBoxByType);
const findIssueFields = () => wrapper.findAll('[data-testid="issueField"]');
+ const findAlertError = () => wrapper.find('[data-testid="alert-error"]');
const alertsCount = {
open: 14,
triggered: 10,
@@ -51,6 +53,11 @@ describe('AlertManagementTable', () => {
resolved: 1,
all: 16,
};
+ const selectFirstStatusOption = () => {
+ findFirstStatusOption().vm.$emit('click');
+
+ return waitForPromises();
+ };
function mountComponent({
props = {
@@ -138,7 +145,7 @@ describe('AlertManagementTable', () => {
it('error state', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { errors: ['error'] }, alertsCount: null, errored: true },
+ data: { alerts: { errors: ['error'] }, alertsCount: null, hasError: true },
loading: false,
});
expect(findAlertsTable().exists()).toBe(true);
@@ -155,7 +162,7 @@ describe('AlertManagementTable', () => {
it('empty state', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: [], pageInfo: {} }, alertsCount: { all: 0 }, errored: false },
+ data: { alerts: { list: [], pageInfo: {} }, alertsCount: { all: 0 }, hasError: false },
loading: false,
});
expect(findAlertsTable().exists()).toBe(true);
@@ -172,7 +179,7 @@ describe('AlertManagementTable', () => {
it('has data state', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
expect(findLoader().exists()).toBe(false);
@@ -188,7 +195,7 @@ describe('AlertManagementTable', () => {
it('displays status dropdown', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
expect(findStatusDropdown().exists()).toBe(true);
@@ -197,7 +204,7 @@ describe('AlertManagementTable', () => {
it('does not display a dropdown status header', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
expect(findStatusDropdown().contains('.dropdown-title')).toBe(false);
@@ -206,7 +213,7 @@ describe('AlertManagementTable', () => {
it('shows correct severity icons', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
@@ -223,7 +230,7 @@ describe('AlertManagementTable', () => {
it('renders severity text', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
@@ -237,7 +244,7 @@ describe('AlertManagementTable', () => {
it('renders Unassigned when no assignee(s) present', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
@@ -251,7 +258,7 @@ describe('AlertManagementTable', () => {
it('renders username(s) when assignee(s) present', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
@@ -265,7 +272,7 @@ describe('AlertManagementTable', () => {
it('navigates to the detail page when alert row is clicked', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
@@ -279,7 +286,7 @@ describe('AlertManagementTable', () => {
beforeEach(() => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
});
@@ -323,7 +330,7 @@ describe('AlertManagementTable', () => {
],
},
alertsCount,
- errored: false,
+ hasError: false,
},
loading: false,
});
@@ -343,7 +350,7 @@ describe('AlertManagementTable', () => {
},
],
alertsCount,
- errored: false,
+ hasError: false,
},
loading: false,
});
@@ -358,7 +365,7 @@ describe('AlertManagementTable', () => {
it('should highlight the row when alert is new', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: [newAlert] }, alertsCount, errored: false },
+ data: { alerts: { list: [newAlert] }, alertsCount, hasError: false },
loading: false,
});
@@ -372,7 +379,7 @@ describe('AlertManagementTable', () => {
it('should not highlight the row when alert is not new', () => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: [oldAlert] }, alertsCount, errored: false },
+ data: { alerts: { list: [oldAlert] }, alertsCount, hasError: false },
loading: false,
});
@@ -392,7 +399,7 @@ describe('AlertManagementTable', () => {
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
data: {
alerts: { list: mockAlerts },
- errored: false,
+ hasError: false,
sort: 'STARTED_AT_DESC',
alertsCount,
},
@@ -429,7 +436,7 @@ describe('AlertManagementTable', () => {
beforeEach(() => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
});
@@ -448,19 +455,36 @@ describe('AlertManagementTable', () => {
});
});
- it('shows an error when request fails', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockReturnValue(Promise.reject(new Error()));
- findFirstStatusOption().vm.$emit('click');
- wrapper.setData({
- errored: true,
+ describe('when a request fails', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockReturnValue(Promise.reject(new Error()));
});
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.find('[data-testid="alert-error"]').exists()).toBe(true);
+ it('shows an error', async () => {
+ await selectFirstStatusOption();
+
+ expect(findAlertError().text()).toContain(
+ 'There was an error while updating the status of the alert.',
+ );
+ });
+
+ it('shows an error when triggered a second time', async () => {
+ await selectFirstStatusOption();
+
+ wrapper.find(GlAlert).vm.$emit('dismiss');
+
+ await wrapper.vm.$nextTick();
+
+ // Assert that the error has been dismissed in the setup
+ expect(findAlertError().exists()).toBe(false);
+
+ await selectFirstStatusOption();
+
+ expect(findAlertError().exists()).toBe(true);
});
});
- it('shows an error when response includes HTML errors', () => {
+ it('shows an error when response includes HTML errors', async () => {
const mockUpdatedMutationErrorResult = {
data: {
updateAlertStatus: {
@@ -474,13 +498,11 @@ describe('AlertManagementTable', () => {
};
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockUpdatedMutationErrorResult);
- findFirstStatusOption().vm.$emit('click');
- wrapper.setData({ errored: true });
- return wrapper.vm.$nextTick(() => {
- expect(wrapper.contains('[data-testid="alert-error"]')).toBe(true);
- expect(wrapper.contains('[data-testid="htmlError"]')).toBe(true);
- });
+ await selectFirstStatusOption();
+
+ expect(findAlertError().exists()).toBe(true);
+ expect(findAlertError().contains('[data-testid="htmlError"]')).toBe(true);
});
});
@@ -510,7 +532,7 @@ describe('AlertManagementTable', () => {
beforeEach(() => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts, pageInfo: {} }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts, pageInfo: {} }, alertsCount, hasError: false },
loading: false,
});
});
@@ -570,7 +592,7 @@ describe('AlertManagementTable', () => {
beforeEach(() => {
mountComponent({
props: { alertManagementEnabled: true, userCanEnableAlertManagement: true },
- data: { alerts: { list: mockAlerts }, alertsCount, errored: false },
+ data: { alerts: { list: mockAlerts }, alertsCount, hasError: false },
loading: false,
});
});
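
Several tests above switch from asserting on manually-set data to an "emit on a child, wait for promises, then assert" flow via the waitForPromises helper. The sketch below shows that flow in isolation, assuming only Jest; flushPromises is a local stand-in for the helper, and the handler is a simplified illustration of the status-update logic, not the real component code.

// Emit-then-flush pattern in isolation: reject the mutation, flush the microtask
// queue, and assert that the error state was set. Illustrative only.
const flushPromises = () => new Promise(resolve => setImmediate(resolve));

describe('status update error handling (sketch)', () => {
  it('records an error message after the mutation rejects', async () => {
    const mutate = jest.fn().mockRejectedValue(new Error('boom'));
    let errorMessage = '';

    // Simplified stand-in for the handler invoked by the dropdown click.
    const onStatusSelected = () =>
      mutate().catch(() => {
        errorMessage = 'There was an error while updating the status of the alert.';
      });

    onStatusSelected();
    await flushPromises();

    expect(errorMessage).toContain('error while updating the status');
  });
});
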
diff --git a/spec/frontend/alert_management/components/alert_metrics_spec.js b/spec/frontend/alert_management/components/alert_metrics_spec.js
index c188363ddc2..e0a069fa1a8 100644
--- a/spec/frontend/alert_management/components/alert_metrics_spec.js
+++ b/spec/frontend/alert_management/components/alert_metrics_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
-import AlertMetrics from '~/alert_management/components/alert_metrics.vue';
import MockAdapter from 'axios-mock-adapter';
import axios from 'axios';
+import AlertMetrics from '~/alert_management/components/alert_metrics.vue';
jest.mock('~/monitoring/stores', () => ({
monitoringDashboard: {},
diff --git a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
index db086782424..a14596b6722 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_managment_sidebar_assignees_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdownItem } from '@gitlab/ui';
import SidebarAssignee from '~/alert_management/components/sidebar/sidebar_assignee.vue';
import SidebarAssignees from '~/alert_management/components/sidebar/sidebar_assignees.vue';
import AlertSetAssignees from '~/alert_management/graphql/mutations/alert_set_assignees.mutation.graphql';
@@ -103,7 +103,7 @@ describe('Alert Details Sidebar Assignees', () => {
it('renders an unassigned option', () => {
wrapper.setData({ isDropdownSearching: false });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlDropdownItem).text()).toBe('Unassigned');
+ expect(wrapper.find(GlDeprecatedDropdownItem).text()).toBe('Unassigned');
});
});
diff --git a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
index c2eaf540e9c..5bd0d3b3c17 100644
--- a/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/alert_management/components/sidebar/alert_sidebar_status_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlDeprecatedDropdownItem, GlLoadingIcon } from '@gitlab/ui';
import { trackAlertStatusUpdateOptions } from '~/alert_management/constants';
import AlertSidebarStatus from '~/alert_management/components/sidebar/sidebar_status.vue';
import updateAlertStatus from '~/alert_management/graphql/mutations/update_alert_status.mutation.graphql';
@@ -10,8 +10,8 @@ const mockAlert = mockAlerts[0];
describe('Alert Details Sidebar Status', () => {
let wrapper;
- const findStatusDropdown = () => wrapper.find(GlDropdown);
- const findStatusDropdownItem = () => wrapper.find(GlDropdownItem);
+ const findStatusDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findStatusDropdownItem = () => wrapper.find(GlDeprecatedDropdownItem);
const findStatusLoadingIcon = () => wrapper.find(GlLoadingIcon);
function mountComponent({ data, sidebarCollapsed = true, loading = false, stubs = {} } = {}) {
diff --git a/spec/frontend/alert_management/mocks/alerts.json b/spec/frontend/alert_management/mocks/alerts.json
index f63019d1e5c..fec101a52b4 100644
--- a/spec/frontend/alert_management/mocks/alerts.json
+++ b/spec/frontend/alert_management/mocks/alerts.json
@@ -9,7 +9,8 @@
"endedAt": "2020-04-17T23:18:14.996Z",
"status": "TRIGGERED",
"assignees": { "nodes": [] },
- "notes": { "nodes": [] }
+ "notes": { "nodes": [] },
+ "todos": { "nodes": [] }
},
{
"iid": "1527543",
@@ -37,7 +38,8 @@
"systemNoteIconName": "user"
}
]
- }
+ },
+ "todos": { "nodes": [] }
},
{
"iid": "1527544",
@@ -63,6 +65,7 @@
}
}
]
- }
+ },
+ "todos": { "nodes": [] }
}
]
diff --git a/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap b/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
index 1f5c3a80fbb..16e92bf505a 100644
--- a/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
+++ b/spec/frontend/alert_settings/__snapshots__/alert_settings_form_spec.js.snap
@@ -13,20 +13,20 @@ exports[`AlertsSettingsForm with default values renders the initial template 1`]
</div>
<gl-form-stub>
<gl-form-group-stub label=\\"Integrations\\" label-for=\\"integrations\\" label-class=\\"label-bold\\">
- <gl-form-select-stub options=\\"[object Object],[object Object],[object Object]\\" data-testid=\\"alert-settings-select\\" value=\\"generic\\"></gl-form-select-stub> <span class=\\"gl-text-gray-400\\"><gl-sprintf-stub message=\\"Learn more about our %{linkStart}upcoming integrations%{linkEnd}\\"></gl-sprintf-stub></span>
+ <gl-form-select-stub options=\\"[object Object],[object Object],[object Object]\\" data-testid=\\"alert-settings-select\\" value=\\"generic\\"></gl-form-select-stub> <span class=\\"gl-text-gray-200\\"><gl-sprintf-stub message=\\"Learn more about our %{linkStart}upcoming integrations%{linkEnd}\\"></gl-sprintf-stub></span>
</gl-form-group-stub>
<gl-form-group-stub label=\\"Active\\" label-for=\\"activated\\" label-class=\\"label-bold\\">
<toggle-button-stub id=\\"activated\\"></toggle-button-stub>
</gl-form-group-stub>
<!---->
<gl-form-group-stub label=\\"Webhook URL\\" label-for=\\"url\\" label-class=\\"label-bold\\">
- <gl-form-input-group-stub value=\\"/alerts/notify.json\\" predefinedoptions=\\"[object Object]\\" id=\\"url\\" readonly=\\"\\"></gl-form-input-group-stub> <span class=\\"gl-text-gray-400\\">
+ <gl-form-input-group-stub value=\\"/alerts/notify.json\\" predefinedoptions=\\"[object Object]\\" id=\\"url\\" readonly=\\"\\"></gl-form-input-group-stub> <span class=\\"gl-text-gray-200\\">
</span>
</gl-form-group-stub>
<gl-form-group-stub label=\\"Authorization key\\" label-for=\\"authorization-key\\" label-class=\\"label-bold\\">
<gl-form-input-group-stub value=\\"abcedfg123\\" predefinedoptions=\\"[object Object]\\" id=\\"authorization-key\\" readonly=\\"\\" class=\\"gl-mb-2\\"></gl-form-input-group-stub>
- <gl-button-stub category=\\"tertiary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\" class=\\"gl-mt-3\\" role=\\"button\\" tabindex=\\"0\\">Reset key</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\" class=\\"gl-mt-3\\" role=\\"button\\" tabindex=\\"0\\">Reset key</gl-button-stub>
<gl-modal-stub modalid=\\"authKeyModal\\" titletag=\\"h4\\" modalclass=\\"\\" size=\\"md\\" title=\\"Reset key\\" ok-title=\\"Reset key\\" ok-variant=\\"danger\\">
Resetting the authorization key for this project will require updating the authorization key in every alert source it is enabled in.
</gl-modal-stub>
@@ -34,14 +34,16 @@ exports[`AlertsSettingsForm with default values renders the initial template 1`]
<gl-form-group-stub label=\\"Alert test payload\\" label-for=\\"alert-json\\" label-class=\\"label-bold\\">
<gl-form-textarea-stub noresize=\\"true\\" id=\\"alert-json\\" disabled=\\"true\\" state=\\"true\\" placeholder=\\"Enter test alert JSON....\\" rows=\\"6\\" max-rows=\\"10\\"></gl-form-textarea-stub>
</gl-form-group-stub>
- <gl-button-stub category=\\"tertiary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
+ <div class=\\"gl-display-flex gl-justify-content-end\\">
+ <gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">Test alert payload</gl-button-stub>
+ </div>
<div class=\\"footer-block row-content-block gl-display-flex gl-justify-content-space-between\\">
- <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
- Save changes
- </gl-button-stub>
<gl-button-stub category=\\"primary\\" variant=\\"default\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
Cancel
</gl-button-stub>
+ <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" disabled=\\"true\\">
+ Save changes
+ </gl-button-stub>
</div>
</gl-form-stub>
</div>"
diff --git a/spec/frontend/alert_settings/alert_settings_form_spec.js b/spec/frontend/alert_settings/alert_settings_form_spec.js
index 5a04d768645..87a631bda56 100644
--- a/spec/frontend/alert_settings/alert_settings_form_spec.js
+++ b/spec/frontend/alert_settings/alert_settings_form_spec.js
@@ -11,41 +11,36 @@ const KEY = 'abcedfg123';
const INVALID_URL = 'http://invalid';
const ACTIVATED = false;
-const defaultProps = {
- generic: {
- initialAuthorizationKey: KEY,
- formPath: INVALID_URL,
- url: GENERIC_URL,
- alertsSetupUrl: INVALID_URL,
- alertsUsageUrl: INVALID_URL,
- activated: ACTIVATED,
- },
- prometheus: {
- prometheusAuthorizationKey: KEY,
- prometheusFormPath: INVALID_URL,
- prometheusUrl: PROMETHEUS_URL,
- activated: ACTIVATED,
- },
- opsgenie: {
- opsgenieMvcIsAvailable: true,
- formPath: INVALID_URL,
- activated: ACTIVATED,
- opsgenieMvcTargetUrl: GENERIC_URL,
- },
-};
-
describe('AlertsSettingsForm', () => {
let wrapper;
let mockAxios;
- const createComponent = (props = defaultProps, { methods } = {}, data) => {
+ const createComponent = ({ methods } = {}, data) => {
wrapper = shallowMount(AlertsSettingsForm, {
data() {
return { ...data };
},
- propsData: {
- ...defaultProps,
- ...props,
+ provide: {
+ generic: {
+ authorizationKey: KEY,
+ formPath: INVALID_URL,
+ url: GENERIC_URL,
+ alertsSetupUrl: INVALID_URL,
+ alertsUsageUrl: INVALID_URL,
+ activated: ACTIVATED,
+ },
+ prometheus: {
+ authorizationKey: KEY,
+ prometheusFormPath: INVALID_URL,
+ prometheusUrl: PROMETHEUS_URL,
+ activated: ACTIVATED,
+ },
+ opsgenie: {
+ opsgenieMvcIsAvailable: true,
+ formPath: INVALID_URL,
+ activated: ACTIVATED,
+ opsgenieMvcTargetUrl: GENERIC_URL,
+ },
},
methods,
});
@@ -83,32 +78,33 @@ describe('AlertsSettingsForm', () => {
describe('reset key', () => {
it('triggers resetKey method', () => {
- const resetGenericKey = jest.fn();
- const methods = { resetGenericKey };
- createComponent(defaultProps, { methods });
+ const resetKey = jest.fn();
+ const methods = { resetKey };
+ createComponent({ methods });
wrapper.find(GlModal).vm.$emit('ok');
- expect(resetGenericKey).toHaveBeenCalled();
+ expect(resetKey).toHaveBeenCalled();
});
it('updates the authorization key on success', () => {
- const formPath = 'some/path';
- mockAxios.onPut(formPath, { service: { token: '' } }).replyOnce(200, { token: 'newToken' });
- createComponent({ generic: { ...defaultProps.generic, formPath } });
+ createComponent(
+ {},
+ {
+ authKey: 'newToken',
+ },
+ );
- return wrapper.vm.resetGenericKey().then(() => {
- expect(findAuthorizationKey().attributes('value')).toBe('newToken');
- });
+ expect(findAuthorizationKey().attributes('value')).toBe('newToken');
});
it('shows an alert message on error', () => {
const formPath = 'some/path';
mockAxios.onPut(formPath).replyOnce(404);
- createComponent({ generic: { ...defaultProps.generic, formPath } });
+ createComponent();
- return wrapper.vm.resetGenericKey().then(() => {
+ return wrapper.vm.resetKey().then(() => {
expect(wrapper.find(GlAlert).exists()).toBe(true);
});
});
@@ -118,22 +114,18 @@ describe('AlertsSettingsForm', () => {
it('triggers toggleActivated method', () => {
const toggleService = jest.fn();
const methods = { toggleService };
- createComponent(defaultProps, { methods });
+ createComponent({ methods });
wrapper.find(ToggleButton).vm.$emit('change', true);
-
expect(toggleService).toHaveBeenCalled();
});
describe('error is encountered', () => {
- beforeEach(() => {
+ it('restores previous value', () => {
const formPath = 'some/path';
mockAxios.onPut(formPath).replyOnce(500);
- });
-
- it('restores previous value', () => {
- createComponent({ generic: { ...defaultProps.generic, initialActivated: false } });
- return wrapper.vm.resetGenericKey().then(() => {
+ createComponent();
+ return wrapper.vm.resetKey().then(() => {
expect(wrapper.find(ToggleButton).props('value')).toBe(false);
});
});
@@ -143,7 +135,6 @@ describe('AlertsSettingsForm', () => {
describe('prometheus is active', () => {
beforeEach(() => {
createComponent(
- { prometheus: { ...defaultProps.prometheus, prometheusIsActivated: true } },
{},
{
selectedEndpoint: 'prometheus',
@@ -164,10 +155,9 @@ describe('AlertsSettingsForm', () => {
});
});
- describe('opsgenie is active', () => {
+ describe('Opsgenie is active', () => {
beforeEach(() => {
createComponent(
- { opsgenie: { ...defaultProps.opsgenie, opsgenieMvcActivated: true } },
{},
{
selectedEndpoint: 'opsgenie',
@@ -175,15 +165,14 @@ describe('AlertsSettingsForm', () => {
);
});
- it('shows a input for the opsgenie target URL', () => {
+ it('shows an input for the Opsgenie target URL', () => {
expect(findApiUrl().exists()).toBe(true);
- expect(findSelect().attributes('value')).toBe('opsgenie');
});
});
describe('trigger test alert', () => {
beforeEach(() => {
- createComponent({ generic: { ...defaultProps.generic, initialActivated: true } }, {}, true);
+ createComponent({});
});
it('should enable the JSON input', () => {
@@ -191,30 +180,19 @@ describe('AlertsSettingsForm', () => {
expect(findJsonInput().props('value')).toBe(null);
});
- it('should validate JSON input', () => {
- createComponent({ generic: { ...defaultProps.generic } }, true, {
+ it('should validate JSON input', async () => {
+ createComponent(true, {
testAlertJson: '{ "value": "test" }',
});
findJsonInput().vm.$emit('change');
- return wrapper.vm.$nextTick().then(() => {
- expect(findJsonInput().attributes('state')).toBe('true');
- });
- });
-
- describe('alert service is toggled', () => {
- it('should show a info alert if successful', () => {
- const formPath = 'some/path';
- const toggleService = true;
- mockAxios.onPut(formPath).replyOnce(200);
- createComponent({ generic: { ...defaultProps.generic, formPath } });
+ await wrapper.vm.$nextTick();
- return wrapper.vm.toggleActivated(toggleService).then(() => {
- expect(wrapper.find(GlAlert).attributes('variant')).toBe('info');
- });
- });
+ expect(findJsonInput().attributes('state')).toBe('true');
+ });
+ describe('alert service is toggled', () => {
it('should show an error alert if failed', () => {
const formPath = 'some/path';
const toggleService = true;
@@ -222,9 +200,10 @@ describe('AlertsSettingsForm', () => {
errors: 'Error message to display',
});
- createComponent({ generic: { ...defaultProps.generic, formPath } });
+ createComponent();
return wrapper.vm.toggleActivated(toggleService).then(() => {
+ expect(wrapper.vm.active).toBe(false);
expect(wrapper.find(GlAlert).attributes('variant')).toBe('danger');
});
});
diff --git a/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js b/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
index 610f9d6b9bd..5574c83eb76 100644
--- a/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
+++ b/spec/frontend/alerts_service_settings/components/alerts_service_form_spec.js
@@ -4,7 +4,7 @@ import { shallowMount } from '@vue/test-utils';
import { GlModal } from '@gitlab/ui';
import AlertsServiceForm from '~/alerts_service_settings/components/alerts_service_form.vue';
import ToggleButton from '~/vue_shared/components/toggle_button.vue';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/analytics/components/activity_chart_spec.js b/spec/frontend/analytics/components/activity_chart_spec.js
new file mode 100644
index 00000000000..1f0f9a6c5d7
--- /dev/null
+++ b/spec/frontend/analytics/components/activity_chart_spec.js
@@ -0,0 +1,39 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlColumnChart } from '@gitlab/ui/dist/charts';
+import ActivityChart from '~/analytics/product_analytics/components/activity_chart.vue';
+
+describe('Activity Chart Bundle', () => {
+ let wrapper;
+ function mountComponent({ provide }) {
+ wrapper = shallowMount(ActivityChart, {
+ provide: {
+ formattedData: {},
+ ...provide,
+ },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findChart = () => wrapper.find(GlColumnChart);
+ const findNoData = () => wrapper.find('[data-testid="noActivityChartData"]');
+
+ describe('Activity Chart', () => {
+ it('renders a warning message when there is no data', () => {
+ mountComponent({ provide: { formattedData: {} } });
+ expect(findNoData().exists()).toBe(true);
+ });
+
+ it('renders a chart with data', () => {
+ mountComponent({
+ provide: { formattedData: { keys: ['key1', 'key2'], values: [5038, 2241] } },
+ });
+
+ expect(findNoData().exists()).toBe(false);
+ expect(findChart().exists()).toBe(true);
+ });
+ });
+});
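
The new activity-chart spec above passes its data through provide rather than props. Here is a minimal, self-contained sketch of that provide/inject mounting pattern, assuming only Jest and @vue/test-utils; FakeChart is an illustrative inline component, not the real ActivityChart.

import { shallowMount } from '@vue/test-utils';

// Illustrative component that reads injected data, mirroring the provide-based
// setup in the spec above. Uses a render function so no template compiler is needed.
const FakeChart = {
  inject: ['formattedData'],
  computed: {
    hasData() {
      return Object.keys(this.formattedData).length > 0;
    },
  },
  render(h) {
    return h('div', this.hasData ? 'chart' : 'no data');
  },
};

describe('provide/inject mounting (sketch)', () => {
  it('shows the empty message when no data is provided', () => {
    const wrapper = shallowMount(FakeChart, { provide: { formattedData: {} } });

    expect(wrapper.text()).toBe('no data');
  });

  it('shows the chart when data is provided', () => {
    const wrapper = shallowMount(FakeChart, {
      provide: { formattedData: { keys: ['key1'], values: [5038] } },
    });

    expect(wrapper.text()).toBe('chart');
  });
});
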
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index c94637e04af..4f4de62c229 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1,6 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import Api from '~/api';
+import httpStatus from '~/lib/utils/http_status';
describe('Api', () => {
const dummyApiVersion = 'v3000';
@@ -57,7 +58,7 @@ describe('Api', () => {
it('fetch all group packages', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/packages`;
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(200, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
return Api.groupPackages(groupId).then(({ data }) => {
expect(data).toEqual(apiResponse);
@@ -70,7 +71,7 @@ describe('Api', () => {
it('fetch all project packages', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/packages`;
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(200, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
return Api.projectPackages(projectId).then(({ data }) => {
expect(data).toEqual(apiResponse);
@@ -92,7 +93,7 @@ describe('Api', () => {
const expectedUrl = `foo`;
jest.spyOn(Api, 'buildProjectPackageUrl').mockReturnValue(expectedUrl);
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(200, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
return Api.projectPackage(projectId, packageId).then(({ data }) => {
expect(data).toEqual(apiResponse);
@@ -107,7 +108,7 @@ describe('Api', () => {
jest.spyOn(Api, 'buildProjectPackageUrl').mockReturnValue(expectedUrl);
jest.spyOn(axios, 'delete');
- mock.onDelete(expectedUrl).replyOnce(200, true);
+ mock.onDelete(expectedUrl).replyOnce(httpStatus.OK, true);
return Api.deleteProjectPackage(projectId, packageId).then(({ data }) => {
expect(data).toEqual(true);
@@ -121,7 +122,7 @@ describe('Api', () => {
it('fetches a group', done => {
const groupId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`;
- mock.onGet(expectedUrl).reply(200, {
+ mock.onGet(expectedUrl).reply(httpStatus.OK, {
name: 'test',
});
@@ -137,7 +138,7 @@ describe('Api', () => {
const groupId = '54321';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/members`;
const expectedData = [{ id: 7 }];
- mock.onGet(expectedUrl).reply(200, expectedData);
+ mock.onGet(expectedUrl).reply(httpStatus.OK, expectedData);
Api.groupMembers(groupId)
.then(({ data }) => {
@@ -148,12 +149,42 @@ describe('Api', () => {
});
});
+ describe('groupMilestones', () => {
+ it('fetches group milestones', done => {
+ const groupId = '16';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/milestones`;
+ const expectedData = [
+ {
+ id: 12,
+ iid: 3,
+ group_id: 16,
+ title: '10.0',
+ description: 'Version',
+ due_date: '2013-11-29',
+ start_date: '2013-11-10',
+ state: 'active',
+ updated_at: '2013-10-02T09:24:18Z',
+ created_at: '2013-10-02T09:24:18Z',
+ web_url: 'https://gitlab.com/groups/gitlab-org/-/milestones/42',
+ },
+ ];
+ mock.onGet(expectedUrl).reply(httpStatus.OK, expectedData);
+
+ Api.groupMilestones(groupId)
+ .then(({ data }) => {
+ expect(data).toEqual(expectedData);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
describe('groups', () => {
it('fetches groups', done => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -171,7 +202,7 @@ describe('Api', () => {
it('fetches namespaces', done => {
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -191,7 +222,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
window.gon.current_user_id = 1;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -208,7 +239,7 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -226,7 +257,7 @@ describe('Api', () => {
it('update a project with the given payload', done => {
const projectPath = 'foo';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}`;
- mock.onPut(expectedUrl).reply(200, { foo: 'bar' });
+ mock.onPut(expectedUrl).reply(httpStatus.OK, { foo: 'bar' });
Api.updateProject(projectPath, { foo: 'bar' })
.then(({ data }) => {
@@ -243,7 +274,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const projectPath = 'gitlab-org%2Fgitlab-ce';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/users`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -265,7 +296,7 @@ describe('Api', () => {
it('fetches all merge requests for a project', done => {
const mockData = [{ source_branch: 'foo' }, { source_branch: 'bar' }];
- mock.onGet(expectedUrl).reply(200, mockData);
+ mock.onGet(expectedUrl).reply(httpStatus.OK, mockData);
Api.projectMergeRequests(projectPath)
.then(({ data }) => {
expect(data.length).toEqual(2);
@@ -281,7 +312,7 @@ describe('Api', () => {
source_branch: 'bar',
};
const mockData = [{ source_branch: 'bar' }];
- mock.onGet(expectedUrl, { params }).reply(200, mockData);
+ mock.onGet(expectedUrl, { params }).reply(httpStatus.OK, mockData);
Api.projectMergeRequests(projectPath, params)
.then(({ data }) => {
@@ -298,7 +329,7 @@ describe('Api', () => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}`;
- mock.onGet(expectedUrl).reply(200, {
+ mock.onGet(expectedUrl).reply(httpStatus.OK, {
title: 'test',
});
@@ -316,7 +347,7 @@ describe('Api', () => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/changes`;
- mock.onGet(expectedUrl).reply(200, {
+ mock.onGet(expectedUrl).reply(httpStatus.OK, {
title: 'test',
});
@@ -334,7 +365,7 @@ describe('Api', () => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/versions`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
id: 123,
},
@@ -356,7 +387,7 @@ describe('Api', () => {
const params = { scope: 'active' };
const mockData = [{ id: 4 }];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/runners`;
- mock.onGet(expectedUrl, { params }).reply(200, mockData);
+ mock.onGet(expectedUrl, { params }).reply(httpStatus.OK, mockData);
Api.projectRunners(projectPath, { params })
.then(({ data }) => {
@@ -380,7 +411,7 @@ describe('Api', () => {
expect(config.data).toBe(JSON.stringify(expectedData));
return [
- 200,
+ httpStatus.OK,
{
name: 'test',
},
@@ -404,7 +435,7 @@ describe('Api', () => {
expect(config.data).toBe(JSON.stringify(expectedData));
return [
- 200,
+ httpStatus.OK,
{
name: 'test',
},
@@ -423,7 +454,7 @@ describe('Api', () => {
const groupId = '123456';
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -445,7 +476,7 @@ describe('Api', () => {
)}/repository/commits/${sha}`;
it('fetches a single commit', () => {
- mock.onGet(expectedUrl).reply(200, { id: sha });
+ mock.onGet(expectedUrl).reply(httpStatus.OK, { id: sha });
return Api.commit(projectId, sha).then(({ data: commit }) => {
expect(commit.id).toBe(sha);
@@ -453,7 +484,7 @@ describe('Api', () => {
});
it('fetches a single commit without stats', () => {
- mock.onGet(expectedUrl, { params: { stats: false } }).reply(200, { id: sha });
+ mock.onGet(expectedUrl, { params: { stats: false } }).reply(httpStatus.OK, { id: sha });
return Api.commit(projectId, sha, { stats: false }).then(({ data: commit }) => {
expect(commit.id).toBe(sha);
@@ -470,7 +501,7 @@ describe('Api', () => {
const expectedUrl = `${dummyUrlRoot}/${namespace}/${project}/templates/${templateType}/${encodeURIComponent(
templateKey,
)}`;
- mock.onGet(expectedUrl).reply(200, 'test');
+ mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
Api.issueTemplate(namespace, project, templateKey, templateType, (error, response) => {
expect(response).toBe('test');
@@ -483,7 +514,7 @@ describe('Api', () => {
it('fetches a list of templates', done => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/gitlab-org%2Fgitlab-ce/templates/licenses`;
- mock.onGet(expectedUrl).reply(200, 'test');
+ mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
Api.projectTemplates('gitlab-org/gitlab-ce', 'licenses', {}, response => {
expect(response).toBe('test');
@@ -497,7 +528,7 @@ describe('Api', () => {
const data = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/gitlab-org%2Fgitlab-ce/templates/licenses/test%20license`;
- mock.onGet(expectedUrl).reply(200, 'test');
+ mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
Api.projectTemplate('gitlab-org/gitlab-ce', 'licenses', 'test license', data, response => {
expect(response).toBe('test');
@@ -511,7 +542,7 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -531,7 +562,7 @@ describe('Api', () => {
it('fetches single user', done => {
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}`;
- mock.onGet(expectedUrl).reply(200, {
+ mock.onGet(expectedUrl).reply(httpStatus.OK, {
name: 'testuser',
});
@@ -547,7 +578,7 @@ describe('Api', () => {
describe('user counts', () => {
it('fetches single user counts', done => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/user_counts`;
- mock.onGet(expectedUrl).reply(200, {
+ mock.onGet(expectedUrl).reply(httpStatus.OK, {
merge_requests: 4,
});
@@ -564,7 +595,7 @@ describe('Api', () => {
it('fetches single user status', done => {
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}/status`;
- mock.onGet(expectedUrl).reply(200, {
+ mock.onGet(expectedUrl).reply(httpStatus.OK, {
message: 'testmessage',
});
@@ -583,7 +614,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}/projects`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -602,7 +633,7 @@ describe('Api', () => {
const projectId = 'example/foobar';
const commitSha = 'abc123def';
const expectedUrl = `${dummyUrlRoot}/${projectId}/commit/${commitSha}/pipelines`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -629,7 +660,7 @@ describe('Api', () => {
jest.spyOn(axios, 'post');
- mock.onPost(expectedUrl).replyOnce(200, {
+ mock.onPost(expectedUrl).replyOnce(httpStatus.OK, {
name: branch,
});
@@ -652,7 +683,7 @@ describe('Api', () => {
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(200, ['fork']);
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK, ['fork']);
Api.projectForks(dummyProjectPath, { visibility: 'private' })
.then(({ data }) => {
@@ -666,62 +697,239 @@ describe('Api', () => {
});
});
- describe('createReleaseLink', () => {
+ describe('createContextCommits', () => {
+ it('creates a new context commit', done => {
+ const projectPath = 'abc';
+ const mergeRequestId = '123456';
+ const commitsData = ['abcdefg'];
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/context_commits`;
+ const expectedData = {
+ commits: commitsData,
+ };
+
+ jest.spyOn(axios, 'post');
+
+ mock.onPost(expectedUrl).replyOnce(200, [
+ {
+ id: 'abcdefghijklmnop',
+ short_id: 'abcdefg',
+ title: 'Dummy commit',
+ },
+ ]);
+
+ Api.createContextCommits(projectPath, mergeRequestId, expectedData)
+ .then(({ data }) => {
+ expect(data[0].title).toBe('Dummy commit');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('allContextCommits', () => {
+ it('gets all context commits', done => {
+ const projectPath = 'abc';
+ const mergeRequestId = '123456';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/context_commits`;
+
+ jest.spyOn(axios, 'get');
+
+ mock
+ .onGet(expectedUrl)
+ .replyOnce(200, [{ id: 'abcdef', short_id: 'abcdefghi', title: 'Dummy commit title' }]);
+
+ Api.allContextCommits(projectPath, mergeRequestId)
+ .then(({ data }) => {
+ expect(data[0].title).toBe('Dummy commit title');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('removeContextCommits', () => {
+ it('removes context commits', done => {
+ const projectPath = 'abc';
+ const mergeRequestId = '123456';
+ const commitsData = ['abcdefg'];
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/context_commits`;
+ const expectedData = {
+ commits: commitsData,
+ };
+
+ jest.spyOn(axios, 'delete');
+
+ mock.onDelete(expectedUrl).replyOnce(204);
+
+ Api.removeContextCommits(projectPath, mergeRequestId, expectedData)
+ .then(() => {
+ expect(axios.delete).toHaveBeenCalledWith(expectedUrl, { data: expectedData });
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+ });
+
+ describe('release-related methods', () => {
const dummyProjectPath = 'gitlab-org/gitlab';
- const dummyReleaseTag = 'v1.3';
- const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${encodeURIComponent(
+ const dummyTagName = 'v1.3';
+ const baseReleaseUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${encodeURIComponent(
dummyProjectPath,
- )}/releases/${dummyReleaseTag}/assets/links`;
- const expectedLink = {
- url: 'https://example.com',
- name: 'An example link',
- };
+ )}/releases`;
- describe('when the Release is successfully created', () => {
- it('resolves the Promise', () => {
- mock.onPost(expectedUrl, expectedLink).replyOnce(201);
+ describe('releases', () => {
+ const expectedUrl = baseReleaseUrl;
- return Api.createReleaseLink(dummyProjectPath, dummyReleaseTag, expectedLink).then(() => {
- expect(mock.history.post).toHaveLength(1);
+ describe('when releases are successfully returned', () => {
+ it('resolves the Promise', () => {
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
+
+ return Api.releases(dummyProjectPath).then(() => {
+ expect(mock.history.get).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when an error occurs while fetching releases', () => {
+ it('rejects the Promise', () => {
+ mock.onGet(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+
+ return Api.releases(dummyProjectPath).catch(() => {
+ expect(mock.history.get).toHaveLength(1);
+ });
});
});
});
- describe('when an error occurs while creating the Release', () => {
- it('rejects the Promise', () => {
- mock.onPost(expectedUrl, expectedLink).replyOnce(500);
+ describe('release', () => {
+ const expectedUrl = `${baseReleaseUrl}/${encodeURIComponent(dummyTagName)}`;
- return Api.createReleaseLink(dummyProjectPath, dummyReleaseTag, expectedLink).catch(() => {
- expect(mock.history.post).toHaveLength(1);
+ describe('when the release is successfully returned', () => {
+ it('resolves the Promise', () => {
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
+
+ return Api.release(dummyProjectPath, dummyTagName).then(() => {
+ expect(mock.history.get).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when an error occurs while fetching the release', () => {
+ it('rejects the Promise', () => {
+ mock.onGet(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+
+ return Api.release(dummyProjectPath, dummyTagName).catch(() => {
+ expect(mock.history.get).toHaveLength(1);
+ });
});
});
});
- });
- describe('deleteReleaseLink', () => {
- const dummyProjectPath = 'gitlab-org/gitlab';
- const dummyReleaseTag = 'v1.3';
- const dummyLinkId = '4';
- const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${encodeURIComponent(
- dummyProjectPath,
- )}/releases/${dummyReleaseTag}/assets/links/${dummyLinkId}`;
+ describe('createRelease', () => {
+ const expectedUrl = baseReleaseUrl;
- describe('when the Release is successfully deleted', () => {
- it('resolves the Promise', () => {
- mock.onDelete(expectedUrl).replyOnce(200);
+ const release = {
+ name: 'Version 1.0',
+ };
+
+ describe('when the release is successfully created', () => {
+ it('resolves the Promise', () => {
+ mock.onPost(expectedUrl, release).replyOnce(httpStatus.CREATED);
+
+ return Api.createRelease(dummyProjectPath, release).then(() => {
+ expect(mock.history.post).toHaveLength(1);
+ });
+ });
+ });
- return Api.deleteReleaseLink(dummyProjectPath, dummyReleaseTag, dummyLinkId).then(() => {
- expect(mock.history.delete).toHaveLength(1);
+ describe('when an error occurs while creating the release', () => {
+ it('rejects the Promise', () => {
+ mock.onPost(expectedUrl, release).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+
+ return Api.createRelease(dummyProjectPath, release).catch(() => {
+ expect(mock.history.post).toHaveLength(1);
+ });
});
});
});
- describe('when an error occurs while deleting the Release', () => {
- it('rejects the Promise', () => {
- mock.onDelete(expectedUrl).replyOnce(500);
+ describe('updateRelease', () => {
+ const expectedUrl = `${baseReleaseUrl}/${encodeURIComponent(dummyTagName)}`;
+
+ const release = {
+ name: 'Version 1.0',
+ };
+
+ describe('when the release is successfully updated', () => {
+ it('resolves the Promise', () => {
+ mock.onPut(expectedUrl, release).replyOnce(httpStatus.OK);
+
+ return Api.updateRelease(dummyProjectPath, dummyTagName, release).then(() => {
+ expect(mock.history.put).toHaveLength(1);
+ });
+ });
+ });
- return Api.deleteReleaseLink(dummyProjectPath, dummyReleaseTag, dummyLinkId).catch(() => {
- expect(mock.history.delete).toHaveLength(1);
+ describe('when an error occurs while updating the release', () => {
+ it('rejects the Promise', () => {
+ mock.onPut(expectedUrl, release).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+
+ return Api.updateRelease(dummyProjectPath, dummyTagName, release).catch(() => {
+ expect(mock.history.put).toHaveLength(1);
+ });
+ });
+ });
+ });
+
+ describe('createReleaseLink', () => {
+ const expectedUrl = `${baseReleaseUrl}/${dummyTagName}/assets/links`;
+ const expectedLink = {
+ url: 'https://example.com',
+ name: 'An example link',
+ };
+
+ describe('when the Release is successfully created', () => {
+ it('resolves the Promise', () => {
+ mock.onPost(expectedUrl, expectedLink).replyOnce(httpStatus.CREATED);
+
+ return Api.createReleaseLink(dummyProjectPath, dummyTagName, expectedLink).then(() => {
+ expect(mock.history.post).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when an error occurs while creating the Release', () => {
+ it('rejects the Promise', () => {
+ mock.onPost(expectedUrl, expectedLink).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+
+ return Api.createReleaseLink(dummyProjectPath, dummyTagName, expectedLink).catch(() => {
+ expect(mock.history.post).toHaveLength(1);
+ });
+ });
+ });
+ });
+
+ describe('deleteReleaseLink', () => {
+ const dummyLinkId = '4';
+ const expectedUrl = `${baseReleaseUrl}/${dummyTagName}/assets/links/${dummyLinkId}`;
+
+ describe('when the Release is successfully deleted', () => {
+ it('resolves the Promise', () => {
+ mock.onDelete(expectedUrl).replyOnce(httpStatus.OK);
+
+ return Api.deleteReleaseLink(dummyProjectPath, dummyTagName, dummyLinkId).then(() => {
+ expect(mock.history.delete).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when an error occurs while deleting the Release', () => {
+ it('rejects the Promise', () => {
+ mock.onDelete(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+
+ return Api.deleteReleaseLink(dummyProjectPath, dummyTagName, dummyLinkId).catch(() => {
+ expect(mock.history.delete).toHaveLength(1);
+ });
});
});
});
@@ -736,7 +944,7 @@ describe('Api', () => {
describe('when the raw file is successfully fetched', () => {
it('resolves the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(200);
+ mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
return Api.getRawFile(dummyProjectPath, dummyFilePath).then(() => {
expect(mock.history.get).toHaveLength(1);
@@ -746,7 +954,7 @@ describe('Api', () => {
describe('when an error occurs while getting a raw file', () => {
it('rejects the Promise', () => {
- mock.onPost(expectedUrl).replyOnce(500);
+ mock.onPost(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
return Api.getRawFile(dummyProjectPath, dummyFilePath).catch(() => {
expect(mock.history.get).toHaveLength(1);
@@ -768,7 +976,7 @@ describe('Api', () => {
describe('when the merge request is successfully created', () => {
it('resolves the Promise', () => {
- mock.onPost(expectedUrl, options).replyOnce(201);
+ mock.onPost(expectedUrl, options).replyOnce(httpStatus.CREATED);
return Api.createProjectMergeRequest(dummyProjectPath, options).then(() => {
expect(mock.history.post).toHaveLength(1);
@@ -778,7 +986,7 @@ describe('Api', () => {
describe('when an error occurs while getting a raw file', () => {
it('rejects the Promise', () => {
- mock.onPost(expectedUrl).replyOnce(500);
+ mock.onPost(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
return Api.createProjectMergeRequest(dummyProjectPath).catch(() => {
expect(mock.history.post).toHaveLength(1);
@@ -793,7 +1001,7 @@ describe('Api', () => {
const issue = 1;
const expectedArray = [1, 2, 3];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/issues/${issue}`;
- mock.onPut(expectedUrl).reply(200, { assigneeIds: expectedArray });
+ mock.onPut(expectedUrl).reply(httpStatus.OK, { assigneeIds: expectedArray });
Api.updateIssue(projectId, issue, { assigneeIds: expectedArray })
.then(({ data }) => {
@@ -810,7 +1018,7 @@ describe('Api', () => {
const mergeRequest = 1;
const expectedArray = [1, 2, 3];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/merge_requests/${mergeRequest}`;
- mock.onPut(expectedUrl).reply(200, { assigneeIds: expectedArray });
+ mock.onPut(expectedUrl).reply(httpStatus.OK, { assigneeIds: expectedArray });
Api.updateMergeRequest(projectId, mergeRequest, { assigneeIds: expectedArray })
.then(({ data }) => {
@@ -827,7 +1035,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const projectId = 8;
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/repository/tags`;
- mock.onGet(expectedUrl).reply(200, [
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
name: 'test',
},
@@ -842,4 +1050,83 @@ describe('Api', () => {
.catch(done.fail);
});
});
+
+ describe('freezePeriods', () => {
+ it('fetches freezePeriods', () => {
+ const projectId = 8;
+ const freezePeriod = {
+ id: 3,
+ freeze_start: '5 4 * * *',
+ freeze_end: '5 9 * 8 *',
+ cron_timezone: 'America/New_York',
+ created_at: '2020-07-10T05:10:35.122Z',
+ updated_at: '2020-07-10T05:10:35.122Z',
+ };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/freeze_periods`;
+ mock.onGet(expectedUrl).reply(httpStatus.OK, [freezePeriod]);
+
+ return Api.freezePeriods(projectId).then(({ data }) => {
+ expect(data[0]).toStrictEqual(freezePeriod);
+ });
+ });
+ });
+
+ describe('createFreezePeriod', () => {
+ const projectId = 8;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/freeze_periods`;
+ const options = {
+ freeze_start: '* * * * *',
+ freeze_end: '* * * * *',
+ cron_timezone: 'America/Juneau',
+ };
+
+ const expectedResult = {
+ id: 10,
+ freeze_start: '* * * * *',
+ freeze_end: '* * * * *',
+ cron_timezone: 'America/Juneau',
+ created_at: '2020-07-11T07:04:50.153Z',
+ updated_at: '2020-07-11T07:04:50.153Z',
+ };
+
+ describe('when the freeze period is successfully created', () => {
+ it('resolves the Promise', () => {
+ mock.onPost(expectedUrl, options).replyOnce(httpStatus.CREATED, expectedResult);
+
+ return Api.createFreezePeriod(projectId, options).then(({ data }) => {
+ expect(data).toStrictEqual(expectedResult);
+ });
+ });
+ });
+ });
+
+ describe('createPipeline', () => {
+ it('creates new pipeline', () => {
+ const redirectUrl = 'ci-project/-/pipelines/95';
+ const projectId = 8;
+ const postData = {
+ ref: 'tag-1',
+ variables: [
+ { key: 'test_file', value: 'test_file_val', variable_type: 'file' },
+ { key: 'test_var', value: 'test_var_val', variable_type: 'env_var' },
+ ],
+ };
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/pipeline`;
+
+ jest.spyOn(axios, 'post');
+
+ mock.onPost(expectedUrl).replyOnce(httpStatus.OK, {
+ web_url: redirectUrl,
+ });
+
+ return Api.createPipeline(projectId, postData).then(({ data }) => {
+ expect(data.web_url).toBe(redirectUrl);
+ expect(axios.post).toHaveBeenCalledWith(expectedUrl, postData, {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+ });
+ });
+ });
});
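The api_spec.js hunks above repeatedly swap bare numeric status codes (200, 201, 500) for named constants and stub the shared axios instance with axios-mock-adapter. Below is a minimal, self-contained sketch of that pattern, not part of the diff: the httpStatus object is a stand-in for the project's status-code helper (its exact import path is assumed), and the /api/v4/... URL is purely illustrative.

import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';

// Stand-in for the project's named status codes; values mirror the constants used above.
const httpStatus = { OK: 200, INTERNAL_SERVER_ERROR: 500 };

describe('stubbing API calls with named status codes (illustrative)', () => {
  let mock;

  beforeEach(() => {
    // Intercept every request made through this axios instance.
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });

  it('resolves when the endpoint replies with OK', () => {
    mock.onGet('/api/v4/projects/1/releases').replyOnce(httpStatus.OK, []);

    return axios.get('/api/v4/projects/1/releases').then(({ status }) => {
      expect(status).toBe(httpStatus.OK);
      // The adapter records every request, so call counts can be asserted directly.
      expect(mock.history.get).toHaveLength(1);
    });
  });

  it('rejects when the endpoint replies with a server error', () => {
    mock.onGet('/api/v4/projects/1/releases').replyOnce(httpStatus.INTERNAL_SERVER_ERROR);

    return expect(axios.get('/api/v4/projects/1/releases')).rejects.toThrow();
  });
});

Returning the promise (rather than relying on done callbacks) lets Jest wait for the assertions inside .then()/.catch() before finishing the spec, which is why the converted specs above keep their return statements.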
diff --git a/spec/frontend/awards_handler_spec.js b/spec/frontend/awards_handler_spec.js
index 6cfbc6024af..1a1738ecf4a 100644
--- a/spec/frontend/awards_handler_spec.js
+++ b/spec/frontend/awards_handler_spec.js
@@ -1,11 +1,11 @@
import $ from 'jquery';
import Cookies from 'js-cookie';
import MockAdapter from 'axios-mock-adapter';
+import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
import axios from '~/lib/utils/axios_utils';
import loadAwardsHandler from '~/awards_handler';
import { setTestTimeout } from './helpers/timeout';
import { EMOJI_VERSION } from '~/emoji';
-import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
window.gl = window.gl || {};
window.gon = window.gon || {};
@@ -162,7 +162,7 @@ describe('AwardsHandler', () => {
describe('::getAwardUrl', () => {
it('returns the url for request', () => {
- expect(awardsHandler.getAwardUrl()).toBe('http://test.host/snippets/1/toggle_award_emoji');
+ expect(awardsHandler.getAwardUrl()).toBe('http://test.host/-/snippets/1/toggle_award_emoji');
});
});
diff --git a/spec/frontend/badges/components/badge_form_spec.js b/spec/frontend/badges/components/badge_form_spec.js
index d61bd29ca9d..1edc9adbfb2 100644
--- a/spec/frontend/badges/components/badge_form_spec.js
+++ b/spec/frontend/badges/components/badge_form_spec.js
@@ -1,11 +1,11 @@
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { DUMMY_IMAGE_URL, TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import store from '~/badges/store';
import createEmptyBadge from '~/badges/empty_badge';
import BadgeForm from '~/badges/components/badge_form.vue';
-import { DUMMY_IMAGE_URL, TEST_HOST } from 'helpers/test_constants';
// avoid preview background process
BadgeForm.methods.debouncedPreview = () => {};
@@ -182,11 +182,11 @@ describe('BadgeForm component', () => {
const buttons = vm.$el.querySelectorAll('.row-content-block button');
expect(buttons.length).toBe(2);
- const buttonSaveElement = buttons[0];
+ const buttonSaveElement = buttons[1];
expect(buttonSaveElement).toBeVisible();
expect(buttonSaveElement).toHaveText('Save changes');
- const buttonCancelElement = buttons[1];
+ const buttonCancelElement = buttons[0];
expect(buttonCancelElement).toBeVisible();
expect(buttonCancelElement).toHaveText('Cancel');
diff --git a/spec/frontend/batch_comments/components/draft_note_spec.js b/spec/frontend/batch_comments/components/draft_note_spec.js
index eea7f25dbc1..99980c98f8b 100644
--- a/spec/frontend/batch_comments/components/draft_note_spec.js
+++ b/spec/frontend/batch_comments/components/draft_note_spec.js
@@ -1,4 +1,5 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { getByRole } from '@testing-library/dom';
import DraftNote from '~/batch_comments/components/draft_note.vue';
import { createStore } from '~/batch_comments/stores';
import NoteableNote from '~/notes/components/noteable_note.vue';
@@ -8,21 +9,34 @@ import { createDraft } from '../mock_data';
const localVue = createLocalVue();
describe('Batch comments draft note component', () => {
+ let store;
let wrapper;
let draft;
+ const LINE_RANGE = {};
+ const draftWithLineRange = {
+ position: {
+ line_range: LINE_RANGE,
+ },
+ };
- beforeEach(() => {
- const store = createStore();
-
- draft = createDraft();
+ const getList = () => getByRole(wrapper.element, 'list');
+ const createComponent = (propsData = { draft }, features = {}) => {
wrapper = shallowMount(localVue.extend(DraftNote), {
store,
- propsData: { draft },
+ propsData,
localVue,
+ provide: {
+ glFeatures: { multilineComments: true, ...features },
+ },
});
jest.spyOn(wrapper.vm.$store, 'dispatch').mockImplementation();
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ draft = createDraft();
});
afterEach(() => {
@@ -30,6 +44,7 @@ describe('Batch comments draft note component', () => {
});
it('renders template', () => {
+ createComponent();
expect(wrapper.find('.draft-pending-label').exists()).toBe(true);
const note = wrapper.find(NoteableNote);
@@ -40,6 +55,7 @@ describe('Batch comments draft note component', () => {
describe('add comment now', () => {
it('dispatches publishSingleDraft when clicking', () => {
+ createComponent();
const publishNowButton = wrapper.find({ ref: 'publishNowButton' });
publishNowButton.vm.$emit('click');
@@ -50,6 +66,7 @@ describe('Batch comments draft note component', () => {
});
it('sets as loading when draft is publishing', done => {
+ createComponent();
wrapper.vm.$store.state.batchComments.currentlyPublishingDrafts.push(1);
wrapper.vm.$nextTick(() => {
@@ -64,6 +81,7 @@ describe('Batch comments draft note component', () => {
describe('update', () => {
it('dispatches updateDraft', done => {
+ createComponent();
const note = wrapper.find(NoteableNote);
note.vm.$emit('handleEdit');
@@ -91,6 +109,7 @@ describe('Batch comments draft note component', () => {
describe('deleteDraft', () => {
it('dispatches deleteDraft', () => {
+ createComponent();
jest.spyOn(window, 'confirm').mockImplementation(() => true);
const note = wrapper.find(NoteableNote);
@@ -103,6 +122,7 @@ describe('Batch comments draft note component', () => {
describe('quick actions', () => {
it('renders referenced commands', done => {
+ createComponent();
wrapper.setProps({
draft: {
...draft,
@@ -122,4 +142,26 @@ describe('Batch comments draft note component', () => {
});
});
});
+
+ describe('multiline comments', () => {
+ describe.each`
+ desc | props | features | event | expectedCalls
+ ${'with `draft.position`'} | ${draftWithLineRange} | ${{}} | ${'mouseenter'} | ${[['setSelectedCommentPositionHover', LINE_RANGE]]}
+ ${'with `draft.position`'} | ${draftWithLineRange} | ${{}} | ${'mouseleave'} | ${[['setSelectedCommentPositionHover']]}
+ ${'with `draft.position`'} | ${draftWithLineRange} | ${{ multilineComments: false }} | ${'mouseenter'} | ${[]}
+ ${'with `draft.position`'} | ${draftWithLineRange} | ${{ multilineComments: false }} | ${'mouseleave'} | ${[]}
+ ${'without `draft.position`'} | ${{}} | ${{}} | ${'mouseenter'} | ${[]}
+ ${'without `draft.position`'} | ${{}} | ${{}} | ${'mouseleave'} | ${[]}
+ `('$desc and features $features', ({ props, event, features, expectedCalls }) => {
+ beforeEach(() => {
+ createComponent({ draft: { ...draft, ...props } }, features);
+ jest.spyOn(store, 'dispatch');
+ });
+
+ it(`calls store ${expectedCalls.length} times on ${event}`, () => {
+ getList().dispatchEvent(new MouseEvent(event, { bubbles: true }));
+ expect(store.dispatch.mock.calls).toEqual(expectedCalls);
+ });
+ });
+ });
});
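The multiline-comments block added to draft_note_spec.js above drives one spec per row of a describe.each tagged-template table. The following stripped-down sketch shows only that table syntax; it is illustrative, and the handle function is a hypothetical stand-in for the component behaviour the real spec exercises through mouse events on the rendered list.

describe.each`
  event           | expectedCalls
  ${'mouseenter'} | ${[['setSelectedCommentPositionHover', { start: 1 }]]}
  ${'mouseleave'} | ${[['setSelectedCommentPositionHover']]}
`('on $event', ({ event, expectedCalls }) => {
  it(`dispatches ${expectedCalls.length} action call(s)`, () => {
    const dispatch = jest.fn();

    // Hypothetical handler mirroring the hover behaviour asserted in the spec above.
    const handle = evt => {
      if (evt === 'mouseenter') {
        dispatch('setSelectedCommentPositionHover', { start: 1 });
      } else {
        dispatch('setSelectedCommentPositionHover');
      }
    };

    handle(event);

    // Comparing mock.calls against the table row checks both the action name and its payload.
    expect(dispatch.mock.calls).toEqual(expectedCalls);
  });
});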
diff --git a/spec/frontend/batch_comments/components/drafts_count_spec.js b/spec/frontend/batch_comments/components/drafts_count_spec.js
index 9d9fffce7e7..83d2f9eb639 100644
--- a/spec/frontend/batch_comments/components/drafts_count_spec.js
+++ b/spec/frontend/batch_comments/components/drafts_count_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import DraftsCount from '~/batch_comments/components/drafts_count.vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import DraftsCount from '~/batch_comments/components/drafts_count.vue';
import { createStore } from '~/batch_comments/stores';
describe('Batch comments drafts count component', () => {
@@ -24,7 +24,7 @@ describe('Batch comments drafts count component', () => {
});
it('renders count', () => {
- expect(vm.$el.querySelector('.drafts-count-number').textContent).toBe('1');
+ expect(vm.$el.textContent).toContain('1');
});
it('renders screen reader text', done => {
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index 7d951fd7799..2b63ece28ba 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import PreviewItem from '~/batch_comments/components/preview_item.vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import PreviewItem from '~/batch_comments/components/preview_item.vue';
import { createStore } from '~/batch_comments/stores';
import diffsModule from '~/diffs/store/modules';
import notesModule from '~/notes/stores/modules';
diff --git a/spec/frontend/batch_comments/components/publish_button_spec.js b/spec/frontend/batch_comments/components/publish_button_spec.js
index 97f3a1c8939..4362f62c7f8 100644
--- a/spec/frontend/batch_comments/components/publish_button_spec.js
+++ b/spec/frontend/batch_comments/components/publish_button_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import PublishButton from '~/batch_comments/components/publish_button.vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import PublishButton from '~/batch_comments/components/publish_button.vue';
import { createStore } from '~/batch_comments/stores';
describe('Batch comments publish button component', () => {
diff --git a/spec/frontend/batch_comments/components/publish_dropdown_spec.js b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
index b50ae340691..fb3c532174d 100644
--- a/spec/frontend/batch_comments/components/publish_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/publish_dropdown_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import PreviewDropdown from '~/batch_comments/components/preview_dropdown.vue';
import { createStore } from '~/mr_notes/stores';
import '~/behaviors/markdown/render_gfm';
import { createDraft } from '../mock_data';
diff --git a/spec/frontend/batch_comments/mock_data.js b/spec/frontend/batch_comments/mock_data.js
index c50fea94fe3..5601e489066 100644
--- a/spec/frontend/batch_comments/mock_data.js
+++ b/spec/frontend/batch_comments/mock_data.js
@@ -1,5 +1,6 @@
import { TEST_HOST } from 'spec/test_constants';
+// eslint-disable-next-line import/prefer-default-export
export const createDraft = () => ({
author: {
id: 1,
@@ -23,5 +24,3 @@ export const createDraft = () => ({
isDraft: true,
position: null,
});
-
-export default () => {};
diff --git a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
index 4bac6d4e3dc..a6942115649 100644
--- a/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
+++ b/spec/frontend/batch_comments/stores/modules/batch_comments/actions_spec.js
@@ -1,8 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import * as actions from '~/batch_comments/stores/modules/batch_comments/actions';
import axios from '~/lib/utils/axios_utils';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Batch comments store actions', () => {
let res = {};
diff --git a/spec/frontend/behaviors/copy_as_gfm_spec.js b/spec/frontend/behaviors/copy_as_gfm_spec.js
index 33af9bc135e..46d4451c941 100644
--- a/spec/frontend/behaviors/copy_as_gfm_spec.js
+++ b/spec/frontend/behaviors/copy_as_gfm_spec.js
@@ -123,4 +123,14 @@ describe('CopyAsGFM', () => {
});
});
});
+
+ describe('CopyAsGFM.quoted', () => {
+ const sampleGFM = '* List 1\n* List 2\n\n`Some code`';
+
+ it('adds quote char `> ` to each line', done => {
+ const expectedQuotedGFM = '> * List 1\n> * List 2\n> \n> `Some code`';
+ expect(CopyAsGFM.quoted(sampleGFM)).toEqual(expectedQuotedGFM);
+ done();
+ });
+ });
});
diff --git a/spec/frontend/behaviors/gl_emoji_spec.js b/spec/frontend/behaviors/gl_emoji_spec.js
index 7ea0bafc328..ef6b1673b7c 100644
--- a/spec/frontend/behaviors/gl_emoji_spec.js
+++ b/spec/frontend/behaviors/gl_emoji_spec.js
@@ -1,10 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'jest/helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { initEmojiMap, EMOJI_VERSION } from '~/emoji';
import installGlEmojiElement from '~/behaviors/gl_emoji';
import * as EmojiUnicodeSupport from '~/emoji/support';
-import waitForPromises from 'jest/helpers/wait_for_promises';
jest.mock('~/emoji/support');
diff --git a/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap
index 0409b118222..72761c18b3d 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_edit_content_spec.js.snap
@@ -4,11 +4,15 @@ exports[`Blob Header Editing rendering matches the snapshot 1`] = `
<div
class="file-content code"
>
- <pre
+ <div
data-editor-loading=""
id="editor"
>
- Lorem ipsum dolor sit amet, consectetur adipiscing elit.
- </pre>
+ <pre
+ class="editor-loading-content"
+ >
+ Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+ </pre>
+ </div>
</div>
`;
diff --git a/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
index 1e639f91797..a5690844053 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_edit_header_spec.js.snap
@@ -4,13 +4,18 @@ exports[`Blob Header Editing rendering matches the snapshot 1`] = `
<div
class="js-file-title file-title-flex-parent"
>
- <gl-form-input-stub
- class="form-control js-snippet-file-name"
- id="snippet_file_name"
- name="snippet_file_name"
- placeholder="Give your file a name to add code highlighting, e.g. example.rb for Ruby"
- type="text"
- value="foo.md"
- />
+ <div
+ class="gl-display-flex gl-align-items-center gl-w-full"
+ >
+ <gl-form-input-stub
+ class="form-control js-snippet-file-name"
+ name="snippet_file_name"
+ placeholder="Give your file a name to add code highlighting, e.g. example.rb for Ruby"
+ type="text"
+ value="foo.md"
+ />
+
+ <!---->
+ </div>
</div>
`;
diff --git a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
index 7d868625956..b54efb93bc9 100644
--- a/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
+++ b/spec/frontend/blob/components/__snapshots__/blob_header_spec.js.snap
@@ -9,7 +9,7 @@ exports[`Blob Header Default Actions rendering matches the snapshot 1`] = `
/>
<div
- class="file-actions d-none d-sm-flex"
+ class="gl-display-none gl-display-sm-flex"
>
<viewer-switcher-stub
value="simple"
diff --git a/spec/frontend/blob/components/blob_content_error_spec.js b/spec/frontend/blob/components/blob_content_error_spec.js
index 508b1ed7e68..0c6d269ad05 100644
--- a/spec/frontend/blob/components/blob_content_error_spec.js
+++ b/spec/frontend/blob/components/blob_content_error_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import BlobContentError from '~/blob/components/blob_content_error.vue';
import { GlSprintf } from '@gitlab/ui';
+import BlobContentError from '~/blob/components/blob_content_error.vue';
import { BLOB_RENDER_ERRORS } from '~/blob/components/constants';
diff --git a/spec/frontend/blob/components/blob_content_spec.js b/spec/frontend/blob/components/blob_content_spec.js
index 244ed41869d..9232a709194 100644
--- a/spec/frontend/blob/components/blob_content_spec.js
+++ b/spec/frontend/blob/components/blob_content_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import BlobContent from '~/blob/components/blob_content.vue';
import BlobContentError from '~/blob/components/blob_content_error.vue';
import {
@@ -13,7 +14,6 @@ import {
RichBlobContentMock,
SimpleBlobContentMock,
} from './mock_data';
-import { GlLoadingIcon } from '@gitlab/ui';
import { RichViewer, SimpleViewer } from '~/vue_shared/components/blob_viewers';
describe('Blob Content component', () => {
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
index 971ef72521d..3cc210e972c 100644
--- a/spec/frontend/blob/components/blob_edit_content_spec.js
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -1,28 +1,31 @@
import { shallowMount } from '@vue/test-utils';
-import BlobEditContent from '~/blob/components/blob_edit_content.vue';
-import { initEditorLite } from '~/blob/utils';
import { nextTick } from 'vue';
+import BlobEditContent from '~/blob/components/blob_edit_content.vue';
+import * as utils from '~/blob/utils';
+import Editor from '~/editor/editor_lite';
-jest.mock('~/blob/utils', () => ({
- initEditorLite: jest.fn(),
-}));
+jest.mock('~/editor/editor_lite');
describe('Blob Header Editing', () => {
let wrapper;
const value = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
const fileName = 'lorem.txt';
+ const fileGlobalId = 'snippet_777';
function createComponent(props = {}) {
wrapper = shallowMount(BlobEditContent, {
propsData: {
value,
fileName,
+ fileGlobalId,
...props,
},
});
}
beforeEach(() => {
+ jest.spyOn(utils, 'initEditorLite');
+
createComponent();
});
@@ -30,6 +33,15 @@ describe('Blob Header Editing', () => {
wrapper.destroy();
});
+ const triggerChangeContent = val => {
+ jest.spyOn(Editor.prototype, 'getValue').mockReturnValue(val);
+ const [cb] = Editor.prototype.onChangeContent.mock.calls[0];
+
+ cb();
+
+ jest.runOnlyPendingTimers();
+ };
+
describe('rendering', () => {
it('matches the snapshot', () => {
expect(wrapper.element).toMatchSnapshot();
@@ -51,18 +63,15 @@ describe('Blob Header Editing', () => {
it('initialises Editor Lite', () => {
const el = wrapper.find({ ref: 'editor' }).element;
- expect(initEditorLite).toHaveBeenCalledWith({
+ expect(utils.initEditorLite).toHaveBeenCalledWith({
el,
blobPath: fileName,
+ blobGlobalId: fileGlobalId,
blobContent: value,
});
});
it('reacts to the changes in fileName', () => {
- wrapper.vm.editor = {
- updateModelLanguage: jest.fn(),
- };
-
const newFileName = 'ipsum.txt';
wrapper.setProps({
@@ -70,21 +79,20 @@ describe('Blob Header Editing', () => {
});
return nextTick().then(() => {
- expect(wrapper.vm.editor.updateModelLanguage).toHaveBeenCalledWith(newFileName);
+ expect(Editor.prototype.updateModelLanguage).toHaveBeenCalledWith(newFileName);
});
});
+ it('registers callback with editor onChangeContent', () => {
+ expect(Editor.prototype.onChangeContent).toHaveBeenCalledWith(expect.any(Function));
+ });
+
it('emits input event when the blob content is changed', () => {
- const editorEl = wrapper.find({ ref: 'editor' });
- wrapper.vm.editor = {
- getValue: jest.fn().mockReturnValue(value),
- };
+ expect(wrapper.emitted().input).toBeUndefined();
- editorEl.trigger('keyup');
+ triggerChangeContent(value);
- return nextTick().then(() => {
- expect(wrapper.emitted().input[0]).toEqual([value]);
- });
+ expect(wrapper.emitted().input).toEqual([[value]]);
});
});
});
diff --git a/spec/frontend/blob/components/blob_edit_header_spec.js b/spec/frontend/blob/components/blob_edit_header_spec.js
index db7d7d7d48d..c71595a79cf 100644
--- a/spec/frontend/blob/components/blob_edit_header_spec.js
+++ b/spec/frontend/blob/components/blob_edit_header_spec.js
@@ -1,18 +1,21 @@
import { shallowMount } from '@vue/test-utils';
+import { GlFormInput, GlButton } from '@gitlab/ui';
import BlobEditHeader from '~/blob/components/blob_edit_header.vue';
-import { GlFormInput } from '@gitlab/ui';
describe('Blob Header Editing', () => {
let wrapper;
const value = 'foo.md';
- function createComponent() {
+ const createComponent = (props = {}) => {
wrapper = shallowMount(BlobEditHeader, {
propsData: {
value,
+ ...props,
},
});
- }
+ };
+ const findDeleteButton = () =>
+ wrapper.findAll(GlButton).wrappers.find(x => x.text() === 'Delete file');
beforeEach(() => {
createComponent();
@@ -30,6 +33,10 @@ describe('Blob Header Editing', () => {
it('contains a form input field', () => {
expect(wrapper.contains(GlFormInput)).toBe(true);
});
+
+ it('does not show delete button', () => {
+ expect(findDeleteButton()).toBeUndefined();
+ });
});
describe('functionality', () => {
@@ -47,4 +54,35 @@ describe('Blob Header Editing', () => {
});
});
});
+
+ describe.each`
+ props | expectedDisabled
+ ${{ showDelete: true }} | ${false}
+ ${{ showDelete: true, canDelete: false }} | ${true}
+ `('with $props', ({ props, expectedDisabled }) => {
+ beforeEach(() => {
+ createComponent(props);
+ });
+
+ it(`shows delete button (disabled=${expectedDisabled})`, () => {
+ const deleteButton = findDeleteButton();
+
+ expect(deleteButton.exists()).toBe(true);
+ expect(deleteButton.props('disabled')).toBe(expectedDisabled);
+ });
+ });
+
+ describe('with delete button', () => {
+ beforeEach(() => {
+ createComponent({ showDelete: true, canDelete: true });
+ });
+
+ it('emits delete when clicked', () => {
+ expect(wrapper.emitted().delete).toBeUndefined();
+
+ findDeleteButton().vm.$emit('click');
+
+ expect(wrapper.emitted().delete).toEqual([[]]);
+ });
+ });
});
diff --git a/spec/frontend/blob/components/blob_embeddable_spec.js b/spec/frontend/blob/components/blob_embeddable_spec.js
index b2fe71f1401..1f6790013ca 100644
--- a/spec/frontend/blob/components/blob_embeddable_spec.js
+++ b/spec/frontend/blob/components/blob_embeddable_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
import { GlFormInputGroup } from '@gitlab/ui';
+import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
describe('Blob Embeddable', () => {
let wrapper;
diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js
index 529e7cc85f5..590e36b16af 100644
--- a/spec/frontend/blob/components/blob_header_default_actions_spec.js
+++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js
@@ -1,4 +1,5 @@
import { mount } from '@vue/test-utils';
+import { GlButtonGroup, GlButton } from '@gitlab/ui';
import BlobHeaderActions from '~/blob/components/blob_header_default_actions.vue';
import {
BTN_COPY_CONTENTS_TITLE,
@@ -6,7 +7,6 @@ import {
BTN_RAW_TITLE,
RICH_BLOB_VIEWER,
} from '~/blob/components/constants';
-import { GlButtonGroup, GlDeprecatedButton } from '@gitlab/ui';
import { Blob } from './mock_data';
describe('Blob Header Default Actions', () => {
@@ -26,7 +26,7 @@ describe('Blob Header Default Actions', () => {
beforeEach(() => {
createComponent();
btnGroup = wrapper.find(GlButtonGroup);
- buttons = wrapper.findAll(GlDeprecatedButton);
+ buttons = wrapper.findAll(GlButton);
});
afterEach(() => {
@@ -61,7 +61,7 @@ describe('Blob Header Default Actions', () => {
createComponent({
activeViewer: RICH_BLOB_VIEWER,
});
- buttons = wrapper.findAll(GlDeprecatedButton);
+ buttons = wrapper.findAll(GlButton);
expect(buttons.at(0).attributes('disabled')).toBeTruthy();
});
diff --git a/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js b/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
index f1a7ac8b21a..cf1101bc22c 100644
--- a/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
+++ b/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
@@ -1,4 +1,5 @@
import { mount } from '@vue/test-utils';
+import { GlButtonGroup, GlButton } from '@gitlab/ui';
import BlobHeaderViewerSwitcher from '~/blob/components/blob_header_viewer_switcher.vue';
import {
RICH_BLOB_VIEWER,
@@ -6,7 +7,6 @@ import {
SIMPLE_BLOB_VIEWER,
SIMPLE_BLOB_VIEWER_TITLE,
} from '~/blob/components/constants';
-import { GlButtonGroup, GlButton } from '@gitlab/ui';
describe('Blob Header Viewer Switcher', () => {
let wrapper;
diff --git a/spec/frontend/blob/components/mock_data.js b/spec/frontend/blob/components/mock_data.js
index 58aa1dc6dc9..8cfcec2693c 100644
--- a/spec/frontend/blob/components/mock_data.js
+++ b/spec/frontend/blob/components/mock_data.js
@@ -47,10 +47,12 @@ export const BinaryBlob = {
};
export const RichBlobContentMock = {
+ path: 'foo.md',
richData: '<h1>Rich</h1>',
};
export const SimpleBlobContentMock = {
+ path: 'foo.js',
plainData: 'Plain',
};
diff --git a/spec/frontend/blob/notebook/notebook_viever_spec.js b/spec/frontend/blob/notebook/notebook_viever_spec.js
index 535d2bd544a..f6a926a5ecb 100644
--- a/spec/frontend/blob/notebook/notebook_viever_spec.js
+++ b/spec/frontend/blob/notebook/notebook_viever_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import component from '~/blob/notebook/notebook_viewer.vue';
import NotebookLab from '~/notebook/index.vue';
-import waitForPromises from 'helpers/wait_for_promises';
describe('iPython notebook renderer', () => {
let wrapper;
diff --git a/spec/frontend/blob/pipeline_tour_success_modal_spec.js b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
index 6d4e5e46cb8..9998cd7f91c 100644
--- a/spec/frontend/blob/pipeline_tour_success_modal_spec.js
+++ b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
@@ -1,8 +1,8 @@
-import pipelineTourSuccess from '~/blob/pipeline_tour_success_modal.vue';
import { shallowMount } from '@vue/test-utils';
import Cookies from 'js-cookie';
-import { GlSprintf, GlModal } from '@gitlab/ui';
+import { GlSprintf, GlModal, GlLink } from '@gitlab/ui';
import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
+import pipelineTourSuccess from '~/blob/pipeline_tour_success_modal.vue';
import modalProps from './pipeline_tour_success_mock_data';
describe('PipelineTourSuccessModal', () => {
@@ -18,6 +18,7 @@ describe('PipelineTourSuccessModal', () => {
propsData: modalProps,
stubs: {
GlModal,
+ GlSprintf,
},
});
@@ -37,6 +38,10 @@ describe('PipelineTourSuccessModal', () => {
expect(sprintf.exists()).toBe(true);
});
+ it('renders the link for codeQualityLink', () => {
+ expect(wrapper.find(GlLink).attributes('href')).toBe(wrapper.vm.$options.codeQualityLink);
+ });
+
it('calls to remove cookie', () => {
wrapper.vm.disableModalFromRenderingAgain();
diff --git a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
index 3c03e6f04ab..4714d34dbec 100644
--- a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
+++ b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
-import Popover from '~/blob/suggest_gitlab_ci_yml/components/popover.vue';
import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
+import { GlButton } from '@gitlab/ui';
+import Popover from '~/blob/suggest_gitlab_ci_yml/components/popover.vue';
import * as utils from '~/lib/utils/common_utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
@@ -96,7 +96,7 @@ describe('Suggest gitlab-ci.yml Popover', () => {
const expectedAction = 'click_button';
const expectedProperty = 'owner';
const expectedValue = '10';
- const dismissButton = wrapper.find(GlDeprecatedButton);
+ const dismissButton = wrapper.find(GlButton);
trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
triggerEvent(dismissButton.element);
diff --git a/spec/frontend/blob/utils_spec.js b/spec/frontend/blob/utils_spec.js
index 119ed2dfe7a..ab9e325e963 100644
--- a/spec/frontend/blob/utils_spec.js
+++ b/spec/frontend/blob/utils_spec.js
@@ -1,53 +1,44 @@
import Editor from '~/editor/editor_lite';
import * as utils from '~/blob/utils';
-const mockCreateMonacoInstance = jest.fn();
-jest.mock('~/editor/editor_lite', () => {
- return jest.fn().mockImplementation(() => {
- return { createInstance: mockCreateMonacoInstance };
- });
-});
+jest.mock('~/editor/editor_lite');
describe('Blob utilities', () => {
- beforeEach(() => {
- Editor.mockClear();
- });
-
describe('initEditorLite', () => {
let editorEl;
const blobPath = 'foo.txt';
const blobContent = 'Foo bar';
+ const blobGlobalId = 'snippet_777';
beforeEach(() => {
- setFixtures('<div id="editor"></div>');
- editorEl = document.getElementById('editor');
+ editorEl = document.createElement('div');
});
describe('Monaco editor', () => {
it('initializes the Editor Lite', () => {
utils.initEditorLite({ el: editorEl });
- expect(Editor).toHaveBeenCalled();
+ expect(Editor).toHaveBeenCalledWith({
+ scrollbar: {
+ alwaysConsumeMouseWheel: false,
+ },
+ });
});
- it('creates the instance with the passed parameters', () => {
- utils.initEditorLite({ el: editorEl });
- expect(mockCreateMonacoInstance.mock.calls[0]).toEqual([
- {
+ it.each([[{}], [{ blobPath, blobContent, blobGlobalId }]])(
+ 'creates the instance with the passed parameters %s',
+ extraParams => {
+ const params = {
el: editorEl,
- blobPath: undefined,
- blobContent: undefined,
- },
- ]);
+ ...extraParams,
+ };
- utils.initEditorLite({ el: editorEl, blobPath, blobContent });
- expect(mockCreateMonacoInstance.mock.calls[1]).toEqual([
- {
- el: editorEl,
- blobPath,
- blobContent,
- },
- ]);
- });
+ expect(Editor.prototype.createInstance).not.toHaveBeenCalled();
+
+ utils.initEditorLite(params);
+
+ expect(Editor.prototype.createInstance).toHaveBeenCalledWith(params);
+ },
+ );
});
});
});
diff --git a/spec/frontend/blob/viewer/index_spec.js b/spec/frontend/blob/viewer/index_spec.js
index 7239f59c6fa..97ac42a10bf 100644
--- a/spec/frontend/blob/viewer/index_spec.js
+++ b/spec/frontend/blob/viewer/index_spec.js
@@ -24,11 +24,11 @@ describe('Blob viewer', () => {
blob = new BlobViewer();
- mock.onGet('http://test.host/snippets/1.json?viewer=rich').reply(200, {
+ mock.onGet('http://test.host/-/snippets/1.json?viewer=rich').reply(200, {
html: '<div>testing</div>',
});
- mock.onGet('http://test.host/snippets/1.json?viewer=simple').reply(200, {
+ mock.onGet('http://test.host/-/snippets/1.json?viewer=simple').reply(200, {
html: '<div>testing</div>',
});
diff --git a/spec/frontend/blob_edit/blob_bundle_spec.js b/spec/frontend/blob_edit/blob_bundle_spec.js
index f5cd623ebce..98fa96de124 100644
--- a/spec/frontend/blob_edit/blob_bundle_spec.js
+++ b/spec/frontend/blob_edit/blob_bundle_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
-import blobBundle from '~/blob_edit/blob_bundle';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import blobBundle from '~/blob_edit/blob_bundle';
jest.mock('~/blob_edit/edit_blob');
diff --git a/spec/frontend/boards/board_card_spec.js b/spec/frontend/boards/board_card_spec.js
index 959c71d05ca..d01b895f996 100644
--- a/spec/frontend/boards/board_card_spec.js
+++ b/spec/frontend/boards/board_card_spec.js
@@ -5,8 +5,8 @@
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
import eventHub from '~/boards/eventhub';
import sidebarEventHub from '~/sidebar/event_hub';
diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js
index 6853fe2559d..c06b7aceaad 100644
--- a/spec/frontend/boards/components/board_column_spec.js
+++ b/spec/frontend/boards/components/board_column_spec.js
@@ -2,14 +2,13 @@ import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
+import { listObj } from 'jest/boards/mock_data';
import Board from '~/boards/components/board_column.vue';
import List from '~/boards/models/list';
import { ListType } from '~/boards/constants';
import axios from '~/lib/utils/axios_utils';
-import { TEST_HOST } from 'helpers/test_constants';
-import { listObj } from 'jest/boards/mock_data';
-
describe('Board Column Component', () => {
let wrapper;
let axiosMock;
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 94f607698d7..b1d277863e8 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import boardsStore from '~/boards/stores/boards_store';
import boardForm from '~/boards/components/board_form.vue';
import DeprecatedModal from '~/vue_shared/components/deprecated_modal.vue';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('board_form.vue', () => {
let wrapper;
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 95673da1c56..76a3d5e71c8 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -2,14 +2,13 @@ import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
+import { listObj } from 'jest/boards/mock_data';
import BoardListHeader from '~/boards/components/board_list_header.vue';
import List from '~/boards/models/list';
import { ListType } from '~/boards/constants';
import axios from '~/lib/utils/axios_utils';
-import { TEST_HOST } from 'helpers/test_constants';
-import { listObj } from 'jest/boards/mock_data';
-
describe('Board List Header Component', () => {
let wrapper;
let axiosMock;
diff --git a/spec/frontend/boards/components/board_settings_sidebar_spec.js b/spec/frontend/boards/components/board_settings_sidebar_spec.js
new file mode 100644
index 00000000000..f39adc0fc49
--- /dev/null
+++ b/spec/frontend/boards/components/board_settings_sidebar_spec.js
@@ -0,0 +1,159 @@
+import '~/boards/models/list';
+import MockAdapter from 'axios-mock-adapter';
+import axios from 'axios';
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlDrawer, GlLabel } from '@gitlab/ui';
+import BoardSettingsSidebar from '~/boards/components/board_settings_sidebar.vue';
+import boardsStore from '~/boards/stores/boards_store';
+import sidebarEventHub from '~/sidebar/event_hub';
+import { inactiveId } from '~/boards/constants';
+
+const localVue = createLocalVue();
+
+localVue.use(Vuex);
+
+describe('BoardSettingsSidebar', () => {
+ let wrapper;
+ let mock;
+ let storeActions;
+ const labelTitle = 'test';
+ const labelColor = '#FFFF';
+ const listId = 1;
+
+ const createComponent = (state = { activeId: inactiveId }, actions = {}) => {
+ storeActions = actions;
+
+ const store = new Vuex.Store({
+ state,
+ actions: storeActions,
+ });
+
+ wrapper = shallowMount(BoardSettingsSidebar, {
+ store,
+ localVue,
+ });
+ };
+ const findLabel = () => wrapper.find(GlLabel);
+ const findDrawer = () => wrapper.find(GlDrawer);
+
+ beforeEach(() => {
+ boardsStore.create();
+ });
+
+ afterEach(() => {
+ jest.restoreAllMocks();
+ wrapper.destroy();
+ });
+
+ it('finds a GlDrawer component', () => {
+ createComponent();
+
+ expect(findDrawer().exists()).toBe(true);
+ });
+
+ describe('on close', () => {
+ it('calls closeSidebar', async () => {
+ const spy = jest.fn();
+ createComponent({ activeId: inactiveId }, { setActiveId: spy });
+
+ findDrawer().vm.$emit('close');
+
+ await wrapper.vm.$nextTick();
+
+ expect(storeActions.setActiveId).toHaveBeenCalledWith(
+ expect.anything(),
+ inactiveId,
+ undefined,
+ );
+ });
+
+ it('calls closeSidebar on sidebar.closeAll event', async () => {
+ createComponent({ activeId: inactiveId }, { setActiveId: jest.fn() });
+
+ sidebarEventHub.$emit('sidebar.closeAll');
+
+ await wrapper.vm.$nextTick();
+
+ expect(storeActions.setActiveId).toHaveBeenCalledWith(
+ expect.anything(),
+ inactiveId,
+ undefined,
+ );
+ });
+ });
+
+ describe('when activeId is zero', () => {
+ it('renders GlDrawer with open false', () => {
+ createComponent();
+
+ expect(findDrawer().props('open')).toBe(false);
+ });
+ });
+
+ describe('when activeId is greater than zero', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ boardsStore.addList({
+ id: listId,
+ label: { title: labelTitle, color: labelColor },
+ list_type: 'label',
+ });
+ });
+
+ afterEach(() => {
+ boardsStore.removeList(listId);
+ });
+
+ it('renders GlDrawer with open true', () => {
+ createComponent({ activeId: 1 });
+
+ expect(findDrawer().props('open')).toBe(true);
+ });
+ });
+
+ describe('when activeId is in boardsStore', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ boardsStore.addList({
+ id: listId,
+ label: { title: labelTitle, color: labelColor },
+ list_type: 'label',
+ });
+
+ createComponent({ activeId: listId });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('renders label title', () => {
+ expect(findLabel().props('title')).toBe(labelTitle);
+ });
+
+ it('renders label background color', () => {
+ expect(findLabel().props('backgroundColor')).toBe(labelColor);
+ });
+ });
+
+ describe('when activeId is not in boardsStore', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ boardsStore.addList({ id: listId, label: { title: labelTitle, color: labelColor } });
+
+ createComponent({ activeId: inactiveId });
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('does not render GlLabel', () => {
+ expect(findLabel().exists()).toBe(false);
+ });
+ });
+});
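The new board_settings_sidebar_spec.js above builds a throwaway Vuex store whose actions are jest.fn() spies and then asserts on what the component dispatches. A minimal self-contained sketch of that pattern follows; DummySidebar and setActiveId here are hypothetical stand-ins, not the real component or action.

import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';

const localVue = createLocalVue();
localVue.use(Vuex);

// Hypothetical component standing in for the sidebar under test.
const DummySidebar = {
  render(h) {
    return h(
      'button',
      { on: { click: () => this.$store.dispatch('setActiveId', 0) } },
      'close',
    );
  },
};

describe('dispatching through a spy action (illustrative)', () => {
  it('records the dispatched payload', async () => {
    const setActiveId = jest.fn();
    const store = new Vuex.Store({
      state: { activeId: 1 },
      actions: { setActiveId },
    });

    const wrapper = shallowMount(DummySidebar, { store, localVue });

    wrapper.find('button').trigger('click');
    await wrapper.vm.$nextTick();

    // Vuex calls the handler with the action context first; the payload is the second argument.
    expect(setActiveId.mock.calls[0][1]).toBe(0);
  });
});

Because the actions are plain spies, the assertions stay focused on the component's dispatch behaviour rather than on real store mutations, which is the same trade-off the spec above makes.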
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index b1ae86c2d3f..f2d4de238d1 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -1,6 +1,6 @@
import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
-import { GlDropdown, GlLoadingIcon } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'spec/test_constants';
import BoardsSelector from '~/boards/components/boards_selector.vue';
import boardsStore from '~/boards/stores/boards_store';
@@ -103,7 +103,7 @@ describe('BoardsSelector', () => {
});
// Emits gl-dropdown show event to simulate the dropdown is opened at initialization time
- wrapper.find(GlDropdown).vm.$emit('show');
+ wrapper.find(GlDeprecatedDropdown).vm.$emit('show');
});
afterEach(() => {
diff --git a/spec/frontend/boards/components/sidebar/remove_issue_spec.js b/spec/frontend/boards/components/sidebar/remove_issue_spec.js
new file mode 100644
index 00000000000..a33e4046724
--- /dev/null
+++ b/spec/frontend/boards/components/sidebar/remove_issue_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+
+import RemoveIssue from '~/boards/components/sidebar/remove_issue.vue';
+
+describe('boards sidebar remove issue', () => {
+ let wrapper;
+
+ const findButton = () => wrapper.find(GlButton);
+
+ const createComponent = propsData => {
+ wrapper = shallowMount(RemoveIssue, {
+ propsData: {
+ issue: {},
+ list: {},
+ ...propsData,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders remove button', () => {
+ expect(findButton().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/boards/issue_card_spec.js b/spec/frontend/boards/issue_card_spec.js
index 15750a161ae..dee8cb7b6e5 100644
--- a/spec/frontend/boards/issue_card_spec.js
+++ b/spec/frontend/boards/issue_card_spec.js
@@ -5,10 +5,10 @@ import '~/boards/models/label';
import '~/boards/models/assignee';
import '~/boards/models/issue';
import '~/boards/models/list';
+import { GlLabel } from '@gitlab/ui';
import IssueCardInner from '~/boards/components/issue_card_inner.vue';
import { listObj } from './mock_data';
import store from '~/boards/stores';
-import { GlLabel } from '@gitlab/ui';
describe('Issue card component', () => {
const user = new ListAssignee({
diff --git a/spec/frontend/boards/issue_spec.js b/spec/frontend/boards/issue_spec.js
index 412f20684f5..d68e17c06a7 100644
--- a/spec/frontend/boards/issue_spec.js
+++ b/spec/frontend/boards/issue_spec.js
@@ -5,7 +5,7 @@ import '~/boards/models/assignee';
import '~/boards/models/issue';
import '~/boards/models/list';
import boardsStore from '~/boards/stores/boards_store';
-import { setMockEndpoints } from './mock_data';
+import { setMockEndpoints, mockIssue } from './mock_data';
describe('Issue model', () => {
let issue;
@@ -14,28 +14,7 @@ describe('Issue model', () => {
setMockEndpoints();
boardsStore.create();
- issue = new ListIssue({
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
- labels: [
- {
- id: 1,
- title: 'test',
- color: 'red',
- description: 'testing',
- },
- ],
- assignees: [
- {
- id: 1,
- name: 'name',
- username: 'username',
- avatar_url: 'http://avatar_url',
- },
- ],
- });
+ issue = new ListIssue(mockIssue);
});
it('has label', () => {
diff --git a/spec/frontend/boards/list_spec.js b/spec/frontend/boards/list_spec.js
index b30281f8df5..b731bb6e474 100644
--- a/spec/frontend/boards/list_spec.js
+++ b/spec/frontend/boards/list_spec.js
@@ -4,6 +4,7 @@
/* global ListLabel */
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import '~/boards/models/label';
import '~/boards/models/assignee';
@@ -11,7 +12,6 @@ import '~/boards/models/issue';
import '~/boards/models/list';
import { ListType } from '~/boards/constants';
import boardsStore from '~/boards/stores/boards_store';
-import waitForPromises from 'helpers/wait_for_promises';
import { listObj, listObjDuplicate, boardsMockInterceptor } from './mock_data';
describe('List model', () => {
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 97d49de6f2e..8ef6efe23c7 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -92,6 +92,29 @@ export const mockMilestone = {
due_date: '2019-12-31',
};
+export const mockIssue = {
+ title: 'Testing',
+ id: 1,
+ iid: 1,
+ confidential: false,
+ labels: [
+ {
+ id: 1,
+ title: 'test',
+ color: 'red',
+ description: 'testing',
+ },
+ ],
+ assignees: [
+ {
+ id: 1,
+ name: 'name',
+ username: 'username',
+ avatar_url: 'http://avatar_url',
+ },
+ ],
+};
+
export const BoardsMockData = {
GET: {
'/test/-/boards/1/lists/300/issues?id=300&page=1': {
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 0debca1310a..d539cba76ca 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1,6 +1,7 @@
+import testAction from 'helpers/vuex_action_helper';
import actions from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
-import testAction from 'helpers/vuex_action_helper';
+import { inactiveId } from '~/boards/constants';
const expectNotImplemented = action => {
it('is not implemented', () => {
@@ -8,19 +9,36 @@ const expectNotImplemented = action => {
});
};
-describe('setEndpoints', () => {
- it('sets endpoints object', () => {
- const mockEndpoints = {
+describe('setInitialBoardData', () => {
+ it('sets data object', () => {
+ const mockData = {
foo: 'bar',
bar: 'baz',
};
return testAction(
- actions.setEndpoints,
- mockEndpoints,
+ actions.setInitialBoardData,
+ mockData,
{},
- [{ type: types.SET_ENDPOINTS, payload: mockEndpoints }],
+ [{ type: types.SET_INITIAL_BOARD_DATA, payload: mockData }],
+ [],
+ );
+ });
+});
+
+describe('setActiveId', () => {
+ it('should commit mutation SET_ACTIVE_ID', done => {
+ const state = {
+ activeId: inactiveId,
+ };
+
+ testAction(
+ actions.setActiveId,
+ 1,
+ state,
+ [{ type: types.SET_ACTIVE_ID, payload: 1 }],
[],
+ done,
);
});
});
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index bc57c30b354..c1f7f3dda6e 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -1,6 +1,6 @@
import mutations from '~/boards/stores/mutations';
-import * as types from '~/boards/stores/mutation_types';
import defaultState from '~/boards/stores/state';
+import { mockIssue } from '../mock_data';
const expectNotImplemented = action => {
it('is not implemented', () => {
@@ -15,7 +15,7 @@ describe('Board Store Mutations', () => {
state = defaultState();
});
- describe('SET_ENDPOINTS', () => {
+ describe('SET_INITIAL_BOARD_DATA', () => {
it('Should set initial Boards data to state', () => {
const endpoints = {
boardsEndpoint: '/boards/',
@@ -25,10 +25,22 @@ describe('Board Store Mutations', () => {
boardId: 1,
fullPath: 'gitlab-org',
};
+ const boardType = 'group';
- mutations[types.SET_ENDPOINTS](state, endpoints);
+ mutations.SET_INITIAL_BOARD_DATA(state, { ...endpoints, boardType });
expect(state.endpoints).toEqual(endpoints);
+ expect(state.boardType).toEqual(boardType);
+ });
+ });
+
+ describe('SET_ACTIVE_ID', () => {
+ it('updates activeListId to be the value that is passed', () => {
+ const expectedId = 1;
+
+ mutations.SET_ACTIVE_ID(state, expectedId);
+
+ expect(state.activeId).toBe(expectedId);
});
});
@@ -68,6 +80,35 @@ describe('Board Store Mutations', () => {
expectNotImplemented(mutations.RECEIVE_REMOVE_LIST_ERROR);
});
+ describe('REQUEST_ISSUES_FOR_ALL_LISTS', () => {
+ it('sets isLoadingIssues to true', () => {
+ expect(state.isLoadingIssues).toBe(false);
+
+ mutations.REQUEST_ISSUES_FOR_ALL_LISTS(state);
+
+ expect(state.isLoadingIssues).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_ISSUES_FOR_ALL_LISTS_SUCCESS', () => {
+ it('sets isLoadingIssues to false and updates issuesByListId object', () => {
+ const listIssues = {
+ '1': [mockIssue],
+ };
+
+ state = {
+ ...state,
+ isLoadingIssues: true,
+ issuesByListId: {},
+ };
+
+ mutations.RECEIVE_ISSUES_FOR_ALL_LISTS_SUCCESS(state, listIssues);
+
+ expect(state.isLoadingIssues).toBe(false);
+ expect(state.issuesByListId).toEqual(listIssues);
+ });
+ });
+
describe('REQUEST_ADD_ISSUE', () => {
expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
});
diff --git a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
index c9948db95f8..261c406171e 100644
--- a/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
+++ b/spec/frontend/branches/components/__snapshots__/divergence_graph_spec.js.snap
@@ -12,7 +12,7 @@ exports[`Branch divergence graph component renders ahead and behind count 1`] =
/>
<div
- class="graph-separator pull-left mt-1"
+ class="graph-separator float-left mt-1"
/>
<graph-bar-stub
diff --git a/spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js b/spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js
index a52b38599f7..7785d436834 100644
--- a/spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_enviroments_dropdown_spec.js
@@ -1,7 +1,7 @@
import Vuex from 'vuex';
-import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlDropdownItem, GlIcon } from '@gitlab/ui';
+import { GlDeprecatedDropdownItem, GlIcon } from '@gitlab/ui';
+import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -26,8 +26,8 @@ describe('Ci environments dropdown', () => {
});
};
- const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
- const findDropdownItemByIndex = index => wrapper.findAll(GlDropdownItem).at(index);
+ const findAllDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
+ const findDropdownItemByIndex = index => wrapper.findAll(GlDeprecatedDropdownItem).at(index);
const findActiveIconByIndex = index => wrapper.findAll(GlIcon).at(index);
afterEach(() => {
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index ad398d6ccd6..4e35243f484 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
-import { GlDeprecatedButton, GlFormCombobox } from '@gitlab/ui';
+import { GlButton, GlFormCombobox } from '@gitlab/ui';
import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
import createStore from '~/ci_variable_list/store';
@@ -29,14 +29,14 @@ describe('Ci variable modal', () => {
};
const findModal = () => wrapper.find(ModalStub);
- const addOrUpdateButton = index =>
+ const findAddorUpdateButton = () =>
findModal()
- .findAll(GlDeprecatedButton)
- .at(index);
+ .findAll(GlButton)
+ .wrappers.find(button => button.props('variant') === 'success');
const deleteVariableButton = () =>
findModal()
- .findAll(GlDeprecatedButton)
- .at(1);
+ .findAll(GlButton)
+ .wrappers.find(button => button.props('variant') === 'danger');
afterEach(() => {
wrapper.destroy();
@@ -69,7 +69,7 @@ describe('Ci variable modal', () => {
});
it('button is disabled when no key/value pair are present', () => {
- expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+ expect(findAddorUpdateButton().attributes('disabled')).toBeTruthy();
});
});
@@ -82,11 +82,11 @@ describe('Ci variable modal', () => {
});
it('button is enabled when key/value pair are present', () => {
- expect(addOrUpdateButton(1).attributes('disabled')).toBeFalsy();
+      expect(findAddOrUpdateButton().attributes('disabled')).toBeFalsy();
});
it('Add variable button dispatches addVariable action', () => {
- addOrUpdateButton(1).vm.$emit('click');
+      findAddOrUpdateButton().vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith('addVariable');
});
@@ -152,11 +152,11 @@ describe('Ci variable modal', () => {
});
it('button text is Update variable when updating', () => {
- expect(addOrUpdateButton(2).text()).toBe('Update variable');
+      expect(findAddOrUpdateButton().text()).toBe('Update variable');
});
it('Update variable button dispatches updateVariable with correct variable', () => {
- addOrUpdateButton(2).vm.$emit('click');
+      findAddOrUpdateButton().vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith('updateVariable');
});
@@ -189,7 +189,7 @@ describe('Ci variable modal', () => {
});
it('disables the submit button', () => {
- expect(addOrUpdateButton(1).attributes('disabled')).toBeTruthy();
+      expect(findAddOrUpdateButton().attributes('disabled')).toBeTruthy();
});
it('shows the correct error text', () => {
@@ -213,7 +213,7 @@ describe('Ci variable modal', () => {
});
it('does not disable the submit button', () => {
- expect(addOrUpdateButton(1).attributes('disabled')).toBeFalsy();
+      expect(findAddOrUpdateButton().attributes('disabled')).toBeFalsy();
});
});
});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js
index 46f77a6f11e..5d37f059161 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_popover_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import CiVariablePopover from '~/ci_variable_list/components/ci_variable_popover.vue';
import mockData from '../services/mock_data';
@@ -18,7 +18,7 @@ describe('Ci Variable Popover', () => {
});
};
- const findButton = () => wrapper.find(GlDeprecatedButton);
+ const findButton = () => wrapper.find(GlButton);
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/ci_variable_list/store/actions_spec.js b/spec/frontend/ci_variable_list/store/actions_spec.js
index eb565d4c979..4b89e467df0 100644
--- a/spec/frontend/ci_variable_list/store/actions_spec.js
+++ b/spec/frontend/ci_variable_list/store/actions_spec.js
@@ -1,8 +1,8 @@
-import Api from '~/api';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import getInitialState from '~/ci_variable_list/store/state';
import * as actions from '~/ci_variable_list/store/actions';
import * as types from '~/ci_variable_list/store/mutation_types';
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index a9870e4db57..d3277cdb7cc 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -1,14 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
-import $ from 'jquery';
import { loadHTMLFixture } from 'helpers/fixtures';
import { setTestTimeout } from 'helpers/timeout';
import Clusters from '~/clusters/clusters_bundle';
-import {
- APPLICATION_STATUS,
- INGRESS_DOMAIN_SUFFIX,
- APPLICATIONS,
- RUNNER,
-} from '~/clusters/constants';
+import { APPLICATION_STATUS, APPLICATIONS, RUNNER } from '~/clusters/constants';
import axios from '~/lib/utils/axios_utils';
import initProjectSelectDropdown from '~/project_select';
@@ -63,25 +57,6 @@ describe('Clusters', () => {
});
});
- describe('toggle', () => {
- it('should update the button and the input field on click', done => {
- const toggleButton = document.querySelector(
- '.js-cluster-enable-toggle-area .js-project-feature-toggle',
- );
- const toggleInput = document.querySelector(
- '.js-cluster-enable-toggle-area .js-project-feature-toggle-input',
- );
-
- $(toggleInput).one('trigger-change', () => {
- expect(toggleButton.classList).not.toContain('is-checked');
- expect(toggleInput.getAttribute('value')).toEqual('false');
- done();
- });
-
- toggleButton.click();
- });
- });
-
describe('checkForNewInstalls', () => {
const INITIAL_APP_MAP = {
helm: { status: null, title: 'Helm Tiller' },
@@ -328,7 +303,6 @@ describe('Clusters', () => {
return promise.then(() => {
expect(cluster.store.state.applications.helm.status).toEqual(INSTALLED);
expect(cluster.store.state.applications.helm.uninstallFailed).toBe(true);
-
expect(cluster.store.state.applications.helm.requestReason).toBeDefined();
});
});
@@ -354,10 +328,8 @@ describe('Clusters', () => {
describe('handleClusterStatusSuccess', () => {
beforeEach(() => {
jest.spyOn(cluster.store, 'updateStateFromServer').mockReturnThis();
- jest.spyOn(cluster, 'toggleIngressDomainHelpText').mockReturnThis();
jest.spyOn(cluster, 'checkForNewInstalls').mockReturnThis();
jest.spyOn(cluster, 'updateContainer').mockReturnThis();
-
cluster.handleClusterStatusSuccess({ data: {} });
});
@@ -369,53 +341,11 @@ describe('Clusters', () => {
expect(cluster.checkForNewInstalls).toHaveBeenCalled();
});
- it('toggles ingress domain help text', () => {
- expect(cluster.toggleIngressDomainHelpText).toHaveBeenCalled();
- });
-
it('updates message containers', () => {
expect(cluster.updateContainer).toHaveBeenCalled();
});
});
- describe('toggleIngressDomainHelpText', () => {
- let ingressPreviousState;
- let ingressNewState;
-
- beforeEach(() => {
- ingressPreviousState = { externalIp: null };
- ingressNewState = { externalIp: '127.0.0.1' };
- });
-
- describe(`when ingress have an external ip assigned`, () => {
- beforeEach(() => {
- cluster.toggleIngressDomainHelpText(ingressPreviousState, ingressNewState);
- });
-
- it('displays custom domain help text', () => {
- expect(cluster.ingressDomainHelpText.classList.contains('hide')).toEqual(false);
- });
-
- it('updates ingress external ip address', () => {
- expect(cluster.ingressDomainSnippet.textContent).toEqual(
- `${ingressNewState.externalIp}${INGRESS_DOMAIN_SUFFIX}`,
- );
- });
- });
-
- describe(`when ingress does not have an external ip assigned`, () => {
- it('hides custom domain help text', () => {
- ingressPreviousState.externalIp = '127.0.0.1';
- ingressNewState.externalIp = null;
- cluster.ingressDomainHelpText.classList.remove('hide');
-
- cluster.toggleIngressDomainHelpText(ingressPreviousState, ingressNewState);
-
- expect(cluster.ingressDomainHelpText.classList.contains('hide')).toEqual(true);
- });
- });
- });
-
describe('updateApplication', () => {
const params = { version: '1.0.0' };
let storeUpdateApplication;
diff --git a/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
index 92237590550..3328ec724fd 100644
--- a/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/applications_spec.js.snap
@@ -17,6 +17,22 @@ exports[`Applications Cert-Manager application shows the correct description 1`]
</p>
`;
+exports[`Applications Cilium application shows the correct description 1`] = `
+<p
+ data-testid="ciliumDescription"
+>
+ Protect your clusters with GitLab Container Network Policies by enforcing how pods communicate with each other and other network endpoints.
+ <a
+ class="gl-link"
+ href="cilium-help-path"
+ rel="noopener"
+ target="_blank"
+ >
+ Learn more about configuring Network Policies here.
+ </a>
+</p>
+`;
+
exports[`Applications Crossplane application shows the correct description 1`] = `
<p
data-testid="crossplaneDescription"
diff --git a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
index d4269bf14ba..93b757e008a 100644
--- a/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
+++ b/spec/frontend/clusters/components/__snapshots__/remove_cluster_confirmation_spec.js.snap
@@ -1,7 +1,9 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Remove cluster confirmation modal renders splitbutton with modal included 1`] = `
-<div>
+<div
+ class="gl-display-flex gl-justify-content-end"
+>
<div
class="dropdown b-dropdown gl-dropdown btn-group"
>
diff --git a/spec/frontend/clusters/components/application_row_spec.js b/spec/frontend/clusters/components/application_row_spec.js
index 94bdd7b7778..b97d4dbf355 100644
--- a/spec/frontend/clusters/components/application_row_spec.js
+++ b/spec/frontend/clusters/components/application_row_spec.js
@@ -83,6 +83,12 @@ describe('Application Row', () => {
checkButtonState('Installing', true, true);
});
+ it('has disabled "Install" when APPLICATION_STATUS.UNINSTALLED', () => {
+ mountComponent({ status: APPLICATION_STATUS.UNINSTALLED });
+
+ checkButtonState('Install', false, true);
+ });
+
it('has disabled "Installed" when application is installed and not uninstallable', () => {
mountComponent({
status: APPLICATION_STATUS.INSTALLED,
@@ -112,6 +118,15 @@ describe('Application Row', () => {
checkButtonState('Install', false, false);
});
+    it('has disabled "Install" when installation is disabled', () => {
+ mountComponent({
+ status: APPLICATION_STATUS.INSTALLABLE,
+ installable: false,
+ });
+
+ checkButtonState('Install', false, true);
+ });
+
it('has enabled "Install" when REQUEST_FAILURE (so you can try installing again)', () => {
mountComponent({ status: APPLICATION_STATUS.INSTALLABLE });
diff --git a/spec/frontend/clusters/components/applications_spec.js b/spec/frontend/clusters/components/applications_spec.js
index 7fc771201c1..e0ccf36e868 100644
--- a/spec/frontend/clusters/components/applications_spec.js
+++ b/spec/frontend/clusters/components/applications_spec.js
@@ -14,10 +14,9 @@ describe('Applications', () => {
beforeEach(() => {
gon.features = gon.features || {};
- gon.features.managedAppsLocalTiller = false;
});
- const createApp = ({ applications, type } = {}, isShallow) => {
+ const createApp = ({ applications, type, props } = {}, isShallow) => {
const mountMethod = isShallow ? shallowMount : mount;
wrapper = mountMethod(Applications, {
@@ -25,6 +24,7 @@ describe('Applications', () => {
propsData: {
type,
applications: { ...APPLICATIONS_MOCK_STATE, ...applications },
+ ...props,
},
});
};
@@ -40,10 +40,6 @@ describe('Applications', () => {
createApp({ type: CLUSTER_TYPE.PROJECT });
});
- it('renders a row for Helm Tiller', () => {
- expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(true);
- });
-
it('renders a row for Ingress', () => {
expect(wrapper.find('.js-cluster-application-row-ingress').exists()).toBe(true);
});
@@ -79,6 +75,9 @@ describe('Applications', () => {
it('renders a row for Fluentd', () => {
expect(wrapper.find('.js-cluster-application-row-fluentd').exists()).toBe(true);
});
+ it('renders a row for Cilium', () => {
+ expect(wrapper.find('.js-cluster-application-row-cilium').exists()).toBe(true);
+ });
});
describe('Group cluster applications', () => {
@@ -86,10 +85,6 @@ describe('Applications', () => {
createApp({ type: CLUSTER_TYPE.GROUP });
});
- it('renders a row for Helm Tiller', () => {
- expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(true);
- });
-
it('renders a row for Ingress', () => {
expect(wrapper.find('.js-cluster-application-row-ingress').exists()).toBe(true);
});
@@ -125,6 +120,10 @@ describe('Applications', () => {
it('renders a row for Fluentd', () => {
expect(wrapper.find('.js-cluster-application-row-fluentd').exists()).toBe(true);
});
+
+ it('renders a row for Cilium', () => {
+ expect(wrapper.find('.js-cluster-application-row-cilium').exists()).toBe(true);
+ });
});
describe('Instance cluster applications', () => {
@@ -132,10 +131,6 @@ describe('Applications', () => {
createApp({ type: CLUSTER_TYPE.INSTANCE });
});
- it('renders a row for Helm Tiller', () => {
- expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(true);
- });
-
it('renders a row for Ingress', () => {
expect(wrapper.find('.js-cluster-application-row-ingress').exists()).toBe(true);
});
@@ -171,18 +166,16 @@ describe('Applications', () => {
it('renders a row for Fluentd', () => {
expect(wrapper.find('.js-cluster-application-row-fluentd').exists()).toBe(true);
});
+
+ it('renders a row for Cilium', () => {
+ expect(wrapper.find('.js-cluster-application-row-cilium').exists()).toBe(true);
+ });
});
describe('Helm application', () => {
- describe('when managedAppsLocalTiller enabled', () => {
- beforeEach(() => {
- gon.features.managedAppsLocalTiller = true;
- });
-
- it('does not render a row for Helm Tiller', () => {
- createApp();
- expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(false);
- });
+ it('does not render a row for Helm Tiller', () => {
+ createApp();
+ expect(wrapper.find('.js-cluster-application-row-helm').exists()).toBe(false);
});
});
@@ -240,7 +233,6 @@ describe('Applications', () => {
externalHostname: 'localhost.localdomain',
modsecurity_enabled: false,
},
- helm: { title: 'Helm Tiller' },
cert_manager: { title: 'Cert-Manager' },
crossplane: { title: 'Crossplane', stack: '' },
runner: { title: 'GitLab Runner' },
@@ -249,6 +241,7 @@ describe('Applications', () => {
knative: { title: 'Knative', hostname: '' },
elastic_stack: { title: 'Elastic Stack' },
fluentd: { title: 'Fluentd' },
+ cilium: { title: 'GitLab Container Network Policies' },
},
});
@@ -365,7 +358,11 @@ describe('Applications', () => {
it('renders readonly input', () => {
createApp({
applications: {
- ingress: { title: 'Ingress', status: 'installed', externalIp: '1.1.1.1' },
+ ingress: {
+ title: 'Ingress',
+ status: 'installed',
+ externalIp: '1.1.1.1',
+ },
jupyter: { title: 'JupyterHub', status: 'installed', hostname: '' },
},
});
@@ -386,14 +383,6 @@ describe('Applications', () => {
false,
);
});
-
- it('renders disabled install button', () => {
- expect(
- wrapper
- .find('.js-cluster-application-row-jupyter .js-cluster-application-install-button')
- .attributes('disabled'),
- ).toEqual('disabled');
- });
});
});
@@ -513,7 +502,7 @@ describe('Applications', () => {
describe('Elastic Stack application', () => {
describe('with elastic stack installable', () => {
- it('renders hostname active input', () => {
+ it('renders the install button enabled', () => {
createApp();
expect(
@@ -522,7 +511,7 @@ describe('Applications', () => {
'.js-cluster-application-row-elastic_stack .js-cluster-application-install-button',
)
.attributes('disabled'),
- ).toEqual('disabled');
+ ).toBeUndefined();
});
});
@@ -552,4 +541,11 @@ describe('Applications', () => {
expect(wrapper.find(FluentdOutputSettings).exists()).toBe(true);
});
});
+
+ describe('Cilium application', () => {
+ it('shows the correct description', () => {
+ createApp({ props: { ciliumHelpPath: 'cilium-help-path' } });
+ expect(findByTestId('ciliumDescription').element).toMatchSnapshot();
+ });
+ });
});
diff --git a/spec/frontend/clusters/components/fluentd_output_settings_spec.js b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
index f03f2535947..0bc4eb73bf9 100644
--- a/spec/frontend/clusters/components/fluentd_output_settings_spec.js
+++ b/spec/frontend/clusters/components/fluentd_output_settings_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlDeprecatedDropdown, GlFormCheckbox } from '@gitlab/ui';
import FluentdOutputSettings from '~/clusters/components/fluentd_output_settings.vue';
import { APPLICATION_STATUS, FLUENTD } from '~/clusters/constants';
-import { GlAlert, GlDropdown, GlFormCheckbox } from '@gitlab/ui';
import eventHub from '~/clusters/event_hub';
const { UPDATING } = APPLICATION_STATUS;
@@ -36,7 +36,7 @@ describe('FluentdOutputSettings', () => {
};
const findSaveButton = () => wrapper.find({ ref: 'saveBtn' });
const findCancelButton = () => wrapper.find({ ref: 'cancelBtn' });
- const findProtocolDropdown = () => wrapper.find(GlDropdown);
+ const findProtocolDropdown = () => wrapper.find(GlDeprecatedDropdown);
const findCheckbox = name =>
wrapper.findAll(GlFormCheckbox).wrappers.find(x => x.text() === name);
const findHost = () => wrapper.find('#fluentd-host');
diff --git a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
index 683f2e5c35a..3a9a608b2e2 100644
--- a/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
+++ b/spec/frontend/clusters/components/ingress_modsecurity_settings_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { GlAlert, GlToggle, GlDeprecatedDropdown } from '@gitlab/ui';
import IngressModsecuritySettings from '~/clusters/components/ingress_modsecurity_settings.vue';
import { APPLICATION_STATUS, INGRESS } from '~/clusters/constants';
-import { GlAlert, GlToggle, GlDropdown } from '@gitlab/ui';
import eventHub from '~/clusters/event_hub';
const { UPDATING } = APPLICATION_STATUS;
@@ -31,7 +31,7 @@ describe('IngressModsecuritySettings', () => {
const findSaveButton = () => wrapper.find('.btn-success');
const findCancelButton = () => wrapper.find('[variant="secondary"]');
const findModSecurityToggle = () => wrapper.find(GlToggle);
- const findModSecurityDropdown = () => wrapper.find(GlDropdown);
+ const findModSecurityDropdown = () => wrapper.find(GlDeprecatedDropdown);
describe('when ingress is installed', () => {
beforeEach(() => {
diff --git a/spec/frontend/clusters/components/knative_domain_editor_spec.js b/spec/frontend/clusters/components/knative_domain_editor_spec.js
index 73d08661199..a07258dcc69 100644
--- a/spec/frontend/clusters/components/knative_domain_editor_spec.js
+++ b/spec/frontend/clusters/components/knative_domain_editor_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdownItem } from '@gitlab/ui';
import KnativeDomainEditor from '~/clusters/components/knative_domain_editor.vue';
import LoadingButton from '~/vue_shared/components/loading_button.vue';
import { APPLICATION_STATUS } from '~/clusters/constants';
@@ -113,7 +113,7 @@ describe('KnativeDomainEditor', () => {
createComponent({ knative: { ...knative, availableDomains: [newDomain] } });
jest.spyOn(wrapper.vm, 'selectDomain');
- wrapper.find(GlDropdownItem).vm.$emit('click');
+ wrapper.find(GlDeprecatedDropdownItem).vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.selectDomain).toHaveBeenCalledWith(newDomain);
diff --git a/spec/frontend/clusters/forms/components/integration_form_spec.js b/spec/frontend/clusters/forms/components/integration_form_spec.js
new file mode 100644
index 00000000000..3a3700eb0b7
--- /dev/null
+++ b/spec/frontend/clusters/forms/components/integration_form_spec.js
@@ -0,0 +1,112 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlToggle, GlButton } from '@gitlab/ui';
+import IntegrationForm from '~/clusters/forms/components/integration_form.vue';
+import { createStore } from '~/clusters/forms/stores/index';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ClusterIntegrationForm', () => {
+ let wrapper;
+
+ const defaultStoreValues = {
+ enabled: true,
+ editable: true,
+ environmentScope: '*',
+ baseDomain: 'testDomain',
+ applicationIngressExternalIp: null,
+ };
+
+ const createWrapper = (storeValues = defaultStoreValues) => {
+ wrapper = shallowMount(IntegrationForm, {
+ localVue,
+ store: createStore(storeValues),
+ provide: {
+ autoDevopsHelpPath: 'topics/autodevops/index',
+ externalEndpointHelpPath: 'user/clusters/applications.md',
+ },
+ });
+ };
+
+ const destroyWrapper = () => {
+ wrapper.destroy();
+ wrapper = null;
+ };
+
+ const findSubmitButton = () => wrapper.find(GlButton);
+ const findGlToggle = () => wrapper.find(GlToggle);
+
+ afterEach(() => {
+ destroyWrapper();
+ });
+
+ describe('rendering', () => {
+ beforeEach(() => createWrapper());
+
+ it('enables toggle if editable is true', () => {
+ expect(findGlToggle().props('disabled')).toBe(false);
+ });
+ it('sets the envScope to default', () => {
+ expect(wrapper.find('[id="cluster_environment_scope"]').attributes('value')).toBe('*');
+ });
+
+ it('sets the baseDomain to default', () => {
+ expect(wrapper.find('[id="cluster_base_domain"]').attributes('value')).toBe('testDomain');
+ });
+
+ describe('when editable is false', () => {
+ beforeEach(() => {
+ createWrapper({ ...defaultStoreValues, editable: false });
+ });
+
+ it('disables toggle if editable is false', () => {
+ expect(findGlToggle().props('disabled')).toBe(true);
+ });
+
+ it('does not render the save button', () => {
+ expect(findSubmitButton().exists()).toBe(false);
+ });
+ });
+
+ it('does not render external IP block if applicationIngressExternalIp was not passed', () => {
+ createWrapper({ ...defaultStoreValues });
+
+ expect(wrapper.find('.js-ingress-domain-help-text').exists()).toBe(false);
+ });
+
+ it('renders external IP block if applicationIngressExternalIp was passed', () => {
+ createWrapper({ ...defaultStoreValues, applicationIngressExternalIp: '127.0.0.1' });
+
+ expect(wrapper.find('.js-ingress-domain-help-text').exists()).toBe(true);
+ });
+ });
+
+ describe('reactivity', () => {
+ beforeEach(() => createWrapper());
+
+    it('enables the submit button on changing the toggle to a different value', () => {
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+          // setData is not ideal because it changes internal implementation details we should not touch,
+          // but our GlFormInput lacks the ability to set a new value.
+ wrapper.setData({ toggleEnabled: !defaultStoreValues.enabled });
+ })
+ .then(() => {
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
+ });
+
+ it('enables the submit button on changing input values', () => {
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ wrapper.setData({ envScope: `${defaultStoreValues.environmentScope}1` });
+ })
+ .then(() => {
+ expect(findSubmitButton().props('disabled')).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/clusters/services/application_state_machine_spec.js b/spec/frontend/clusters/services/application_state_machine_spec.js
index b27cd2c80fd..7eee54949fa 100644
--- a/spec/frontend/clusters/services/application_state_machine_spec.js
+++ b/spec/frontend/clusters/services/application_state_machine_spec.js
@@ -19,6 +19,7 @@ const {
UPDATE_ERRORED,
UNINSTALLING,
UNINSTALL_ERRORED,
+ UNINSTALLED,
} = APPLICATION_STATUS;
const NO_EFFECTS = 'no effects';
@@ -40,6 +41,7 @@ describe('applicationStateMachine', () => {
${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
${UNINSTALLING} | ${UNINSTALLING} | ${NO_EFFECTS}
${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
+ ${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
`(`transitions to $expectedState on $event event and applies $effects`, data => {
const { expectedState, event, effects } = data;
const currentAppState = {
@@ -74,8 +76,9 @@ describe('applicationStateMachine', () => {
it.each`
expectedState | event | effects
${INSTALLING} | ${INSTALL_EVENT} | ${{ installFailed: false }}
- ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
+ ${INSTALLED} | ${INSTALLED} | ${{ installFailed: false }}
${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
+ ${UNINSTALLED} | ${UNINSTALLED} | ${{ installFailed: false }}
`(`transitions to $expectedState on $event event and applies $effects`, data => {
const { expectedState, event, effects } = data;
const currentAppState = {
@@ -113,6 +116,8 @@ describe('applicationStateMachine', () => {
${UPDATING} | ${UPDATE_EVENT} | ${{ updateFailed: false, updateSuccessful: false }}
${UNINSTALLING} | ${UNINSTALL_EVENT} | ${{ uninstallFailed: false, uninstallSuccessful: false }}
${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
+ ${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
+ ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
`(`transitions to $expectedState on $event event and applies $effects`, data => {
const { expectedState, event, effects } = data;
const currentAppState = {
@@ -162,6 +167,23 @@ describe('applicationStateMachine', () => {
});
});
+ describe(`current state is ${UNINSTALLED}`, () => {
+ it.each`
+ expectedState | event | effects
+ ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
+ ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
+ `(`transitions to $expectedState on $event event and applies $effects`, data => {
+ const { expectedState, event, effects } = data;
+ const currentAppState = {
+ status: UNINSTALLED,
+ };
+
+ expect(transitionApplicationState(currentAppState, event)).toEqual({
+ status: expectedState,
+ ...noEffectsToEmptyObject(effects),
+ });
+ });
+ });
describe('current state is undefined', () => {
it('returns the current state without having any effects', () => {
const currentAppState = {};
diff --git a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
index 3e5f8de8e7b..57c538d2650 100644
--- a/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
+++ b/spec/frontend/clusters/services/crossplane_provider_stack_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdownItem, GlIcon } from '@gitlab/ui';
+import { GlDeprecatedDropdownItem, GlIcon } from '@gitlab/ui';
import CrossplaneProviderStack from '~/clusters/components/crossplane_provider_stack.vue';
describe('CrossplaneProviderStack component', () => {
@@ -37,7 +37,7 @@ describe('CrossplaneProviderStack component', () => {
createComponent({ crossplane });
});
- const findDropdownElements = () => wrapper.findAll(GlDropdownItem);
+ const findDropdownElements = () => wrapper.findAll(GlDeprecatedDropdownItem);
const findFirstDropdownElement = () => findDropdownElements().at(0);
afterEach(() => {
diff --git a/spec/frontend/clusters/services/mock_data.js b/spec/frontend/clusters/services/mock_data.js
index c5ec3f6e6a8..4f8b27d623c 100644
--- a/spec/frontend/clusters/services/mock_data.js
+++ b/spec/frontend/clusters/services/mock_data.js
@@ -151,7 +151,11 @@ const DEFAULT_APPLICATION_STATE = {
const APPLICATIONS_MOCK_STATE = {
helm: { title: 'Helm Tiller', status: 'installable' },
- ingress: { title: 'Ingress', status: 'installable', modsecurity_enabled: false },
+ ingress: {
+ title: 'Ingress',
+ status: 'installable',
+ modsecurity_enabled: false,
+ },
crossplane: { title: 'Crossplane', status: 'installable', stack: '' },
cert_manager: { title: 'Cert-Manager', status: 'installable' },
runner: { title: 'GitLab Runner' },
@@ -160,6 +164,10 @@ const APPLICATIONS_MOCK_STATE = {
knative: { title: 'Knative ', status: 'installable', hostname: '' },
elastic_stack: { title: 'Elastic Stack', status: 'installable' },
fluentd: { title: 'Fluentd', status: 'installable' },
+ cilium: {
+ title: 'GitLab Container Network Policies',
+ status: 'not_installable',
+ },
};
export { CLUSTERS_MOCK_DATA, DEFAULT_APPLICATION_STATE, APPLICATIONS_MOCK_STATE };
diff --git a/spec/frontend/clusters/stores/clusters_store_spec.js b/spec/frontend/clusters/stores/clusters_store_spec.js
index 36e99c37be5..ed862818c7b 100644
--- a/spec/frontend/clusters/stores/clusters_store_spec.js
+++ b/spec/frontend/clusters/stores/clusters_store_spec.js
@@ -66,6 +66,7 @@ describe('Clusters Store', () => {
status: mockResponseData.applications[0].status,
statusReason: mockResponseData.applications[0].status_reason,
requestReason: null,
+ installable: true,
installed: false,
installFailed: false,
uninstallable: false,
@@ -80,6 +81,7 @@ describe('Clusters Store', () => {
requestReason: null,
externalIp: null,
externalHostname: null,
+ installable: true,
installed: false,
isEditingModSecurityEnabled: false,
isEditingModSecurityMode: false,
@@ -100,6 +102,7 @@ describe('Clusters Store', () => {
version: mockResponseData.applications[2].version,
updateAvailable: mockResponseData.applications[2].update_available,
chartRepo: 'https://gitlab.com/gitlab-org/charts/gitlab-runner',
+ installable: true,
installed: false,
installFailed: false,
updateFailed: false,
@@ -114,6 +117,7 @@ describe('Clusters Store', () => {
status: APPLICATION_STATUS.INSTALLABLE,
statusReason: mockResponseData.applications[3].status_reason,
requestReason: null,
+ installable: true,
installed: false,
installFailed: true,
uninstallable: false,
@@ -130,6 +134,7 @@ describe('Clusters Store', () => {
ciliumLogEnabled: null,
host: null,
protocol: null,
+ installable: true,
installed: false,
isEditingSettings: false,
installFailed: false,
@@ -145,6 +150,7 @@ describe('Clusters Store', () => {
statusReason: mockResponseData.applications[4].status_reason,
requestReason: null,
hostname: '',
+ installable: true,
installed: false,
installFailed: false,
uninstallable: false,
@@ -161,6 +167,7 @@ describe('Clusters Store', () => {
isEditingDomain: false,
externalIp: null,
externalHostname: null,
+ installable: true,
installed: false,
installFailed: false,
uninstallable: false,
@@ -177,6 +184,7 @@ describe('Clusters Store', () => {
statusReason: mockResponseData.applications[6].status_reason,
requestReason: null,
email: mockResponseData.applications[6].email,
+ installable: true,
installed: false,
uninstallable: false,
uninstallSuccessful: false,
@@ -189,6 +197,7 @@ describe('Clusters Store', () => {
installFailed: true,
statusReason: mockResponseData.applications[7].status_reason,
requestReason: null,
+ installable: true,
installed: false,
uninstallable: false,
uninstallSuccessful: false,
@@ -201,12 +210,26 @@ describe('Clusters Store', () => {
installFailed: true,
statusReason: mockResponseData.applications[8].status_reason,
requestReason: null,
+ installable: true,
installed: false,
uninstallable: false,
uninstallSuccessful: false,
uninstallFailed: false,
validationError: null,
},
+ cilium: {
+ title: 'GitLab Container Network Policies',
+ status: null,
+ statusReason: null,
+ requestReason: null,
+ installable: false,
+ installed: false,
+ installFailed: false,
+ uninstallable: false,
+ uninstallSuccessful: false,
+ uninstallFailed: false,
+ validationError: null,
+ },
},
environments: [],
fetchingEnvironments: false,
diff --git a/spec/frontend/clusters_list/components/ancestor_notice_spec.js b/spec/frontend/clusters_list/components/ancestor_notice_spec.js
index c931912eaf9..cff84180f26 100644
--- a/spec/frontend/clusters_list/components/ancestor_notice_spec.js
+++ b/spec/frontend/clusters_list/components/ancestor_notice_spec.js
@@ -1,7 +1,7 @@
-import AncestorNotice from '~/clusters_list/components/ancestor_notice.vue';
-import ClusterStore from '~/clusters_list/store';
import { shallowMount } from '@vue/test-utils';
import { GlLink, GlSprintf } from '@gitlab/ui';
+import AncestorNotice from '~/clusters_list/components/ancestor_notice.vue';
+import ClusterStore from '~/clusters_list/store';
describe('ClustersAncestorNotice', () => {
let store;
diff --git a/spec/frontend/clusters_list/components/clusters_spec.js b/spec/frontend/clusters_list/components/clusters_spec.js
index deb275a9bb9..c6a5f66a627 100644
--- a/spec/frontend/clusters_list/components/clusters_spec.js
+++ b/spec/frontend/clusters_list/components/clusters_spec.js
@@ -1,11 +1,11 @@
-import axios from '~/lib/utils/axios_utils';
-import Clusters from '~/clusters_list/components/clusters.vue';
-import ClusterStore from '~/clusters_list/store';
import MockAdapter from 'axios-mock-adapter';
-import { apiData } from '../mock_data';
import { mount } from '@vue/test-utils';
import { GlLoadingIcon, GlPagination, GlSkeletonLoading, GlTable } from '@gitlab/ui';
import * as Sentry from '@sentry/browser';
+import axios from '~/lib/utils/axios_utils';
+import Clusters from '~/clusters_list/components/clusters.vue';
+import ClusterStore from '~/clusters_list/store';
+import { apiData } from '../mock_data';
describe('Clusters', () => {
let mock;
diff --git a/spec/frontend/clusters_list/store/actions_spec.js b/spec/frontend/clusters_list/store/actions_spec.js
index c8556350747..053128a179a 100644
--- a/spec/frontend/clusters_list/store/actions_spec.js
+++ b/spec/frontend/clusters_list/store/actions_spec.js
@@ -1,14 +1,14 @@
import MockAdapter from 'axios-mock-adapter';
-import Poll from '~/lib/utils/poll';
-import flashError from '~/flash';
import testAction from 'helpers/vuex_action_helper';
-import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
+import * as Sentry from '@sentry/browser';
+import Poll from '~/lib/utils/poll';
+import { deprecatedCreateFlash as flashError } from '~/flash';
+import axios from '~/lib/utils/axios_utils';
import { apiData } from '../mock_data';
import { MAX_REQUESTS } from '~/clusters_list/constants';
import * as types from '~/clusters_list/store/mutation_types';
import * as actions from '~/clusters_list/store/actions';
-import * as Sentry from '@sentry/browser';
jest.mock('~/flash.js');
diff --git a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
index 161c2bade05..745a163951a 100644
--- a/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
+++ b/spec/frontend/code_navigation/components/__snapshots__/popover_spec.js.snap
@@ -56,7 +56,7 @@ exports[`Code navigation popover component renders popover 1`] = `
class="popover-body border-top"
>
<gl-button-stub
- category="tertiary"
+ category="primary"
class="w-100"
data-testid="go-to-definition-btn"
href="http://gitlab.com/test.js"
diff --git a/spec/frontend/collapsed_sidebar_todo_spec.js b/spec/frontend/collapsed_sidebar_todo_spec.js
index 0ea797ce4b3..0c74491aa74 100644
--- a/spec/frontend/collapsed_sidebar_todo_spec.js
+++ b/spec/frontend/collapsed_sidebar_todo_spec.js
@@ -1,10 +1,10 @@
/* eslint-disable no-new */
import { clone } from 'lodash';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
import axios from '~/lib/utils/axios_utils';
import Sidebar from '~/right_sidebar';
import waitForPromises from './helpers/wait_for_promises';
-import { TEST_HOST } from 'spec/test_constants';
describe('Issuable right sidebar collapsed todo toggle', () => {
const fixtureName = 'issues/open-issue.html';
diff --git a/spec/frontend/commit/commit_pipeline_status_component_spec.js b/spec/frontend/commit/commit_pipeline_status_component_spec.js
index 9281d1d02a3..1086985eec0 100644
--- a/spec/frontend/commit/commit_pipeline_status_component_spec.js
+++ b/spec/frontend/commit/commit_pipeline_status_component_spec.js
@@ -2,7 +2,7 @@ import Visibility from 'visibilityjs';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Poll from '~/lib/utils/poll';
-import flash from '~/flash';
+import { deprecatedCreateFlash as flash } from '~/flash';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import CommitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import { getJSONFixture } from '../helpers/fixtures';
diff --git a/spec/frontend/commit/pipelines/pipelines_spec.js b/spec/frontend/commit/pipelines/pipelines_spec.js
index 86ae207e7b7..fdf3c2e85f3 100644
--- a/spec/frontend/commit/pipelines/pipelines_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_spec.js
@@ -121,14 +121,14 @@ describe('Pipelines table in Commits and Merge requests', () => {
pipelineCopy = { ...pipeline };
});
- describe('when latest pipeline has detached flag and canRunPipeline is true', () => {
+ describe('when latest pipeline has detached flag', () => {
it('renders the run pipeline button', done => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
pipelineCopy.flags.merge_request_pipeline = true;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
- vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: true });
+ vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
expect(vm.$el.querySelector('.js-run-mr-pipeline')).not.toBeNull();
@@ -137,14 +137,14 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
- describe('when latest pipeline has detached flag and canRunPipeline is false', () => {
+ describe('when latest pipeline does not have detached flag', () => {
it('does not render the run pipeline button', done => {
- pipelineCopy.flags.detached_merge_request_pipeline = true;
- pipelineCopy.flags.merge_request_pipeline = true;
+ pipelineCopy.flags.detached_merge_request_pipeline = false;
+ pipelineCopy.flags.merge_request_pipeline = false;
mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
- vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: false });
+ vm = mountComponent(PipelinesTable, { ...props });
setImmediate(() => {
expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
@@ -153,39 +153,47 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
});
- describe('when latest pipeline does not have detached flag and canRunPipeline is true', () => {
- it('does not render the run pipeline button', done => {
- pipelineCopy.flags.detached_merge_request_pipeline = false;
- pipelineCopy.flags.merge_request_pipeline = false;
+ describe('on click', () => {
+ const findModal = () =>
+ document.querySelector('#create-pipeline-for-fork-merge-request-modal');
- mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
+ beforeEach(() => {
+ pipelineCopy.flags.detached_merge_request_pipeline = true;
- vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: true });
+ mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
- setImmediate(() => {
- expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
- done();
+ vm = mountComponent(PipelinesTable, {
+ ...props,
+ canRunPipeline: true,
+ projectId: '5',
+ mergeRequestId: 3,
});
});
- });
- describe('when latest pipeline does not have detached flag and merge_request_pipeline is true', () => {
- it('does not render the run pipeline button', done => {
- pipelineCopy.flags.detached_merge_request_pipeline = false;
- pipelineCopy.flags.merge_request_pipeline = true;
+ it('updates the loading state', done => {
+ jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
- mock.onGet('endpoint.json').reply(200, [pipelineCopy]);
+ setImmediate(() => {
+ vm.$el.querySelector('.js-run-mr-pipeline').click();
- vm = mountComponent(PipelinesTable, { ...props, canRunPipeline: false });
+ vm.$nextTick(() => {
+ expect(findModal()).toBeNull();
+ expect(vm.state.isRunningMergeRequestPipeline).toBe(true);
- setImmediate(() => {
- expect(vm.$el.querySelector('.js-run-mr-pipeline')).toBeNull();
- done();
+ setImmediate(() => {
+ expect(vm.state.isRunningMergeRequestPipeline).toBe(false);
+
+ done();
+ });
+ });
});
});
});
- describe('on click', () => {
+ describe('on click for fork merge request', () => {
+ const findModal = () =>
+ document.querySelector('#create-pipeline-for-fork-merge-request-modal');
+
beforeEach(() => {
pipelineCopy.flags.detached_merge_request_pipeline = true;
@@ -193,26 +201,23 @@ describe('Pipelines table in Commits and Merge requests', () => {
vm = mountComponent(PipelinesTable, {
...props,
- canRunPipeline: true,
projectId: '5',
mergeRequestId: 3,
+ canCreatePipelineInTargetProject: true,
+ sourceProjectFullPath: 'test/parent-project',
+ targetProjectFullPath: 'test/fork-project',
});
});
- it('updates the loading state', done => {
+ it('shows a security warning modal', done => {
jest.spyOn(Api, 'postMergeRequestPipeline').mockReturnValue(Promise.resolve());
setImmediate(() => {
vm.$el.querySelector('.js-run-mr-pipeline').click();
vm.$nextTick(() => {
- expect(vm.state.isRunningMergeRequestPipeline).toBe(true);
-
- setImmediate(() => {
- expect(vm.state.isRunningMergeRequestPipeline).toBe(false);
-
- done();
- });
+ expect(findModal()).not.toBeNull();
+ done();
});
});
});
diff --git a/spec/frontend/confidential_merge_request/components/dropdown_spec.js b/spec/frontend/confidential_merge_request/components/dropdown_spec.js
index 69495f3c161..3e95cd6c0d7 100644
--- a/spec/frontend/confidential_merge_request/components/dropdown_spec.js
+++ b/spec/frontend/confidential_merge_request/components/dropdown_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdownItem } from '@gitlab/ui';
import Dropdown from '~/confidential_merge_request/components/dropdown.vue';
let vm;
@@ -30,7 +30,7 @@ describe('Confidential merge request project dropdown component', () => {
},
]);
- expect(vm.findAll(GlDropdownItem).length).toBe(2);
+ expect(vm.findAll(GlDeprecatedDropdownItem).length).toBe(2);
});
it('renders selected project icon', () => {
diff --git a/spec/frontend/confirm_modal_spec.js b/spec/frontend/confirm_modal_spec.js
index b14d1c3e01d..70076532a94 100644
--- a/spec/frontend/confirm_modal_spec.js
+++ b/spec/frontend/confirm_modal_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import initConfirmModal from '~/confirm_modal';
import { TEST_HOST } from 'helpers/test_constants';
+import initConfirmModal from '~/confirm_modal';
describe('ConfirmModal', () => {
const buttons = [
diff --git a/spec/frontend/contributors/store/actions_spec.js b/spec/frontend/contributors/store/actions_spec.js
index 55437da837c..ad490ea4b67 100644
--- a/spec/frontend/contributors/store/actions_spec.js
+++ b/spec/frontend/contributors/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
-import flashError from '~/flash';
+import { deprecatedCreateFlash as flashError } from '~/flash';
import * as actions from '~/contributors/stores/actions';
import * as types from '~/contributors/stores/mutation_types';
diff --git a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
index 01f7ada9cd6..882a4a002bd 100644
--- a/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
+++ b/spec/frontend/create_cluster/eks_cluster/store/actions_spec.js
@@ -23,7 +23,7 @@ import {
CREATE_CLUSTER_ERROR,
} from '~/create_cluster/eks_cluster/store/mutation_types';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
new file mode 100644
index 00000000000..9ecf6bf375b
--- /dev/null
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
@@ -0,0 +1,92 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { GlButton, GlModal } from '@gitlab/ui';
+import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vue';
+import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
+import createStore from '~/deploy_freeze/store';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Deploy freeze modal', () => {
+ let wrapper;
+ let store;
+ const freezePeriodsFixture = getJSONFixture('/api/freeze-periods/freeze_periods.json');
+ const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
+
+ beforeEach(() => {
+ store = createStore({
+ projectId: '8',
+ timezoneData: timezoneDataFixture,
+ });
+ wrapper = shallowMount(DeployFreezeModal, {
+ attachToDocument: true,
+ stubs: {
+ GlModal,
+ },
+ localVue,
+ store,
+ });
+ });
+
+ const findModal = () => wrapper.find(GlModal);
+ const addDeployFreezeButton = () =>
+ findModal()
+ .findAll(GlButton)
+ .at(1);
+
+ const setInput = (freezeStartCron, freezeEndCron, selectedTimezone) => {
+ store.state.freezeStartCron = freezeStartCron;
+ store.state.freezeEndCron = freezeEndCron;
+ store.state.selectedTimezone = selectedTimezone;
+
+ wrapper.find('#deploy-freeze-start').trigger('input');
+ wrapper.find('#deploy-freeze-end').trigger('input');
+ wrapper.find(TimezoneDropdown).trigger('input');
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('Basic interactions', () => {
+ it('button is disabled when freeze period is invalid', () => {
+ expect(addDeployFreezeButton().attributes('disabled')).toBeTruthy();
+ });
+ });
+
+ describe('Adding a new deploy freeze', () => {
+ beforeEach(() => {
+ const { freeze_start, freeze_end, cron_timezone } = freezePeriodsFixture[0];
+ setInput(freeze_start, freeze_end, cron_timezone);
+ });
+
+ it('button is enabled when valid freeze period settings are present', () => {
+ expect(addDeployFreezeButton().attributes('disabled')).toBeUndefined();
+ });
+ });
+
+ describe('Validations', () => {
+ describe('when the cron state is invalid', () => {
+ beforeEach(() => {
+ setInput('invalid cron', 'invalid cron', 'invalid timezone');
+ });
+
+ it('disables the add deploy freeze button', () => {
+ expect(addDeployFreezeButton().attributes('disabled')).toBeTruthy();
+ });
+ });
+
+ describe('when the cron state is valid', () => {
+ beforeEach(() => {
+ const { freeze_start, freeze_end, cron_timezone } = freezePeriodsFixture[0];
+ setInput(freeze_start, freeze_end, cron_timezone);
+ });
+
+ it('does not disable the submit button', () => {
+ expect(addDeployFreezeButton().attributes('disabled')).toBeFalsy();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
new file mode 100644
index 00000000000..d40df7de7d1
--- /dev/null
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_settings_spec.js
@@ -0,0 +1,42 @@
+import Vuex from 'vuex';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import DeployFreezeSettings from '~/deploy_freeze/components/deploy_freeze_settings.vue';
+import DeployFreezeTable from '~/deploy_freeze/components/deploy_freeze_table.vue';
+import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vue';
+import createStore from '~/deploy_freeze/store';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Deploy freeze settings', () => {
+ let wrapper;
+ let store;
+ const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
+
+ beforeEach(() => {
+ store = createStore({
+ projectId: '8',
+ timezoneData: timezoneDataFixture,
+ });
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ wrapper = shallowMount(DeployFreezeSettings, {
+ localVue,
+ store,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('Deploy freeze table contains components', () => {
+ it('contains deploy freeze table', () => {
+ expect(wrapper.find(DeployFreezeTable).exists()).toBe(true);
+ });
+
+ it('contains deploy freeze modal', () => {
+ expect(wrapper.find(DeployFreezeModal).exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
new file mode 100644
index 00000000000..383ffa90b22
--- /dev/null
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
@@ -0,0 +1,70 @@
+import Vuex from 'vuex';
+import { createLocalVue, mount } from '@vue/test-utils';
+import DeployFreezeTable from '~/deploy_freeze/components/deploy_freeze_table.vue';
+import createStore from '~/deploy_freeze/store';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Deploy freeze table', () => {
+ let wrapper;
+ let store;
+ const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
+
+ const createComponent = () => {
+ store = createStore({
+ projectId: '8',
+ timezoneData: timezoneDataFixture,
+ });
+ jest.spyOn(store, 'dispatch').mockImplementation();
+ wrapper = mount(DeployFreezeTable, {
+ attachToDocument: true,
+ localVue,
+ store,
+ });
+ };
+
+ const findEmptyFreezePeriods = () => wrapper.find('[data-testid="empty-freeze-periods"]');
+ const findAddDeployFreezeButton = () => wrapper.find('[data-testid="add-deploy-freeze"]');
+ const findDeployFreezeTable = () => wrapper.find('[data-testid="deploy-freeze-table"]');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('dispatches fetchFreezePeriods when mounted', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('fetchFreezePeriods');
+ });
+
+ describe('Renders correct data', () => {
+ it('displays empty', () => {
+ expect(findEmptyFreezePeriods().exists()).toBe(true);
+ expect(findEmptyFreezePeriods().text()).toBe(
+ 'No deploy freezes exist for this project. To add one, click Add deploy freeze',
+ );
+ });
+
+ it('displays data', () => {
+ const freezePeriodsFixture = getJSONFixture('/api/freeze-periods/freeze_periods.json');
+ store.state.freezePeriods = freezePeriodsFixture;
+
+ return wrapper.vm.$nextTick(() => {
+ const tableRows = findDeployFreezeTable().findAll('tbody tr');
+ expect(tableRows.length).toBe(freezePeriodsFixture.length);
+ expect(findEmptyFreezePeriods().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Table click actions', () => {
+ it('displays add deploy freeze button', () => {
+ expect(findAddDeployFreezeButton().exists()).toBe(true);
+ expect(findAddDeployFreezeButton().text()).toBe('Add deploy freeze');
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js b/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
new file mode 100644
index 00000000000..644cd0b5f27
--- /dev/null
+++ b/spec/frontend/deploy_freeze/components/timezone_dropdown_spec.js
@@ -0,0 +1,98 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlDeprecatedDropdownItem, GlNewDropdown } from '@gitlab/ui';
+import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
+import createStore from '~/deploy_freeze/store';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Deploy freeze timezone dropdown', () => {
+ let wrapper;
+ let store;
+ const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
+
+ const createComponent = (searchTerm, selectedTimezone) => {
+ store = createStore({
+ projectId: '8',
+ timezoneData: timezoneDataFixture,
+ });
+ wrapper = shallowMount(TimezoneDropdown, {
+ store,
+ localVue,
+ propsData: {
+ value: selectedTimezone,
+ timezoneData: timezoneDataFixture,
+ },
+ });
+
+ wrapper.setData({ searchTerm });
+ };
+
+ const findAllDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
+ const findDropdownItemByIndex = index => wrapper.findAll(GlDeprecatedDropdownItem).at(index);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('No time zones found', () => {
+ beforeEach(() => {
+ createComponent('UTC timezone');
+ });
+
+ it('renders empty results message', () => {
+ expect(findDropdownItemByIndex(0).text()).toBe('No matching results');
+ });
+ });
+
+ describe('Search term is empty', () => {
+ beforeEach(() => {
+ createComponent('');
+ });
+
+ it('renders all timezones when search term is empty', () => {
+ expect(findAllDropdownItems()).toHaveLength(timezoneDataFixture.length);
+ });
+ });
+
+ describe('Time zones found', () => {
+ beforeEach(() => {
+ createComponent('Alaska');
+ });
+
+ it('renders only the time zone searched for', () => {
+ expect(findAllDropdownItems()).toHaveLength(1);
+ expect(findDropdownItemByIndex(0).text()).toBe('[UTC -8] Alaska');
+ });
+
+ it('should not display empty results message', () => {
+ expect(wrapper.find('[data-testid="noMatchingResults"]').exists()).toBe(false);
+ });
+
+ describe('Custom events', () => {
+ it('should emit input if a time zone is clicked', () => {
+ findDropdownItemByIndex(0).vm.$emit('click');
+ expect(wrapper.emitted('input')).toEqual([
+ [
+ {
+ formattedTimezone: '[UTC -8] Alaska',
+ identifier: 'America/Juneau',
+ },
+ ],
+ ]);
+ });
+ });
+ });
+
+ describe('Selected time zone', () => {
+ beforeEach(() => {
+ createComponent('', 'Alaska');
+ });
+
+ it('renders selected time zone as dropdown label', () => {
+ expect(wrapper.find(GlNewDropdown).vm.text).toBe('Alaska');
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/store/actions_spec.js b/spec/frontend/deploy_freeze/store/actions_spec.js
new file mode 100644
index 00000000000..97f94cdbf5e
--- /dev/null
+++ b/spec/frontend/deploy_freeze/store/actions_spec.js
@@ -0,0 +1,123 @@
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import axios from '~/lib/utils/axios_utils';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import getInitialState from '~/deploy_freeze/store/state';
+import * as actions from '~/deploy_freeze/store/actions';
+import * as types from '~/deploy_freeze/store/mutation_types';
+
+jest.mock('~/api.js');
+jest.mock('~/flash.js');
+
+describe('deploy freeze store actions', () => {
+ let mock;
+ let state;
+ const freezePeriodsFixture = getJSONFixture('/api/freeze-periods/freeze_periods.json');
+ const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ state = getInitialState({
+ projectId: '8',
+ timezoneData: timezoneDataFixture,
+ });
+ Api.freezePeriods.mockResolvedValue({ data: freezePeriodsFixture });
+ Api.createFreezePeriod.mockResolvedValue();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('setSelectedTimezone', () => {
+ it('commits SET_SELECTED_TIMEZONE mutation', () => {
+ testAction(actions.setSelectedTimezone, {}, {}, [
+ {
+ payload: {},
+ type: types.SET_SELECTED_TIMEZONE,
+ },
+ ]);
+ });
+ });
+
+ describe('setFreezeStartCron', () => {
+ it('commits SET_FREEZE_START_CRON mutation', () => {
+ testAction(actions.setFreezeStartCron, {}, {}, [
+ {
+ type: types.SET_FREEZE_START_CRON,
+ },
+ ]);
+ });
+ });
+
+ describe('setFreezeEndCron', () => {
+ it('commits SET_FREEZE_END_CRON mutation', () => {
+ testAction(actions.setFreezeEndCron, {}, {}, [
+ {
+ type: types.SET_FREEZE_END_CRON,
+ },
+ ]);
+ });
+ });
+
+ describe('addFreezePeriod', () => {
+    it('dispatches the correct actions on adding a freeze period', () => {
+ testAction(
+ actions.addFreezePeriod,
+ {},
+ state,
+ [{ type: 'RESET_MODAL' }],
+ [
+ { type: 'requestAddFreezePeriod' },
+ { type: 'receiveAddFreezePeriodSuccess' },
+ { type: 'fetchFreezePeriods' },
+ ],
+ );
+ });
+
+ it('should show flash error and set error in state on add failure', () => {
+ Api.createFreezePeriod.mockRejectedValue();
+
+ testAction(
+ actions.addFreezePeriod,
+ {},
+ state,
+ [],
+ [{ type: 'requestAddFreezePeriod' }, { type: 'receiveAddFreezePeriodError' }],
+ () => expect(createFlash).toHaveBeenCalled(),
+ );
+ });
+ });
+
+ describe('fetchFreezePeriods', () => {
+    it('dispatches the correct actions on fetchFreezePeriods', () => {
+ testAction(
+ actions.fetchFreezePeriods,
+ {},
+ state,
+ [
+ { type: types.REQUEST_FREEZE_PERIODS },
+ { type: types.RECEIVE_FREEZE_PERIODS_SUCCESS, payload: freezePeriodsFixture },
+ ],
+ [],
+ );
+ });
+
+    it('should show flash error and set error in state on fetch freeze periods failure', () => {
+ Api.freezePeriods.mockRejectedValue();
+
+ testAction(
+ actions.fetchFreezePeriods,
+ {},
+ state,
+ [{ type: types.REQUEST_FREEZE_PERIODS }],
+ [],
+ () =>
+ expect(createFlash).toHaveBeenCalledWith(
+ 'There was an error fetching the deploy freezes.',
+ ),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/store/mutations_spec.js b/spec/frontend/deploy_freeze/store/mutations_spec.js
new file mode 100644
index 00000000000..0453e037e15
--- /dev/null
+++ b/spec/frontend/deploy_freeze/store/mutations_spec.js
@@ -0,0 +1,72 @@
+import state from '~/deploy_freeze/store/state';
+import mutations from '~/deploy_freeze/store/mutations';
+import * as types from '~/deploy_freeze/store/mutation_types';
+import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+
+describe('Deploy freeze mutations', () => {
+ let stateCopy;
+ const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
+
+ beforeEach(() => {
+ stateCopy = state({
+ projectId: '8',
+ timezoneData: timezoneDataFixture,
+ });
+ });
+
+ describe('RESET_MODAL', () => {
+ it('should reset modal state', () => {
+ mutations[types.RESET_MODAL](stateCopy);
+
+ expect(stateCopy.freezeStartCron).toBe('');
+ expect(stateCopy.freezeEndCron).toBe('');
+ expect(stateCopy.selectedTimezone).toBe('');
+ expect(stateCopy.selectedTimezoneIdentifier).toBe('');
+ });
+ });
+
+ describe('RECEIVE_FREEZE_PERIODS_SUCCESS', () => {
+ it('should set freeze periods and format timezones from identifiers to names', () => {
+ const timezoneNames = ['Berlin', 'UTC', 'Eastern Time (US & Canada)'];
+ const freezePeriodsFixture = getJSONFixture('/api/freeze-periods/freeze_periods.json');
+
+ mutations[types.RECEIVE_FREEZE_PERIODS_SUCCESS](stateCopy, freezePeriodsFixture);
+
+ const expectedFreezePeriods = freezePeriodsFixture.map((freezePeriod, index) => ({
+ ...convertObjectPropsToCamelCase(freezePeriod),
+ cronTimezone: timezoneNames[index],
+ }));
+
+ expect(stateCopy.freezePeriods).toMatchObject(expectedFreezePeriods);
+ });
+ });
+
+ describe('SET_SELECTED_TIMEZONE', () => {
+ it('should set the cron timezone', () => {
+ const timezone = {
+ formattedTimezone: '[UTC -7] Pacific Time (US & Canada)',
+ identifier: 'America/Los_Angeles',
+ };
+ mutations[types.SET_SELECTED_TIMEZONE](stateCopy, timezone);
+
+ expect(stateCopy.selectedTimezone).toEqual(timezone.formattedTimezone);
+ expect(stateCopy.selectedTimezoneIdentifier).toEqual(timezone.identifier);
+ });
+ });
+
+ describe('SET_FREEZE_START_CRON', () => {
+ it('should set freezeStartCron', () => {
+ mutations[types.SET_FREEZE_START_CRON](stateCopy, '5 0 * 8 *');
+
+ expect(stateCopy.freezeStartCron).toBe('5 0 * 8 *');
+ });
+ });
+
+ describe('SET_FREEZE_END_CRON', () => {
+ it('should set freezeEndCron', () => {
+ mutations[types.SET_FREEZE_END_CRON](stateCopy, '5 0 * 8 *');
+
+ expect(stateCopy.freezeEndCron).toBe('5 0 * 8 *');
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap b/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap
index 4c848256e5b..62a0f675cff 100644
--- a/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap
+++ b/spec/frontend/design_management/components/__snapshots__/design_note_pin_spec.js.snap
@@ -3,13 +3,13 @@
exports[`Design note pin component should match the snapshot of note when repositioning 1`] = `
<button
aria-label="Comment form position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center btn-transparent comment-indicator"
+ class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 btn-transparent comment-indicator"
style="left: 10px; top: 10px; cursor: move;"
type="button"
>
- <icon-stub
+ <gl-icon-stub
name="image-comment-dark"
- size="16"
+ size="24"
/>
</button>
`;
@@ -17,7 +17,7 @@ exports[`Design note pin component should match the snapshot of note when reposi
exports[`Design note pin component should match the snapshot of note with index 1`] = `
<button
aria-label="Comment '1' position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center js-image-badge badge badge-pill"
+ class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 js-image-badge badge badge-pill"
style="left: 10px; top: 10px;"
type="button"
>
@@ -30,13 +30,13 @@ exports[`Design note pin component should match the snapshot of note with index
exports[`Design note pin component should match the snapshot of note without index 1`] = `
<button
aria-label="Comment form position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center btn-transparent comment-indicator"
+ class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 btn-transparent comment-indicator"
style="left: 10px; top: 10px;"
type="button"
>
- <icon-stub
+ <gl-icon-stub
name="image-comment-dark"
- size="16"
+ size="24"
/>
</button>
`;
diff --git a/spec/frontend/design_management/components/delete_button_spec.js b/spec/frontend/design_management/components/delete_button_spec.js
index 9d3bcd98e44..cd4ef1f0ccd 100644
--- a/spec/frontend/design_management/components/delete_button_spec.js
+++ b/spec/frontend/design_management/components/delete_button_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton, GlModal, GlModalDirective } from '@gitlab/ui';
+import { GlButton, GlModal, GlModalDirective } from '@gitlab/ui';
import BatchDeleteButton from '~/design_management/components/delete_button.vue';
describe('Batch delete button component', () => {
let wrapper;
- const findButton = () => wrapper.find(GlDeprecatedButton);
+ const findButton = () => wrapper.find(GlButton);
const findModal = () => wrapper.find(GlModal);
function createComponent(isDeleting = false) {
diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
index 102e8e0664c..176c10ea584 100644
--- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
@@ -61,6 +61,10 @@ describe('Design discussions component', () => {
...data,
};
},
+ provide: {
+ projectPath: 'project-path',
+ issueIid: '1',
+ },
mocks: {
$apollo,
$route: {
diff --git a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
index 9cd427f6aae..d76b6e712fe 100644
--- a/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
+++ b/spec/frontend/design_management/components/list/__snapshots__/item_spec.js.snap
@@ -8,328 +8,9 @@ exports[`Design management list item component when item appears in view after i
/>
`;
-exports[`Design management list item component with no notes renders item with correct status icon for creation event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <div
- class="design-event position-absolute"
- >
- <span
- aria-label="Added in this version"
- title="Added in this version"
- >
- <icon-stub
- class="text-success-500"
- name="file-addition-solid"
- size="18"
- />
- </span>
- </div>
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders item with correct status icon for deletion event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <div
- class="design-event position-absolute"
- >
- <span
- aria-label="Deleted in this version"
- title="Deleted in this version"
- >
- <icon-stub
- class="text-danger-500"
- name="file-deletion-solid"
- size="18"
- />
- </span>
- </div>
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders item with correct status icon for modification event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <div
- class="design-event position-absolute"
- >
- <span
- aria-label="Modified in this version"
- title="Modified in this version"
- >
- <icon-stub
- class="text-primary-500"
- name="file-modified-solid"
- size="18"
- />
- </span>
- </div>
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders item with no status icon for none event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <!---->
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders loading spinner when isUploading is true 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <!---->
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <gl-loading-icon-stub
- color="orange"
- label="Loading"
- size="md"
- />
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- style="display: none;"
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
exports[`Design management list item component with notes renders item with multiple comments 1`] = `
<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
+ class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
to="[object Object]"
>
<div
@@ -337,9 +18,7 @@ exports[`Design management list item component with notes renders item with mult
>
<!---->
- <gl-intersection-observer-stub
- options="[object Object]"
- >
+ <gl-intersection-observer-stub>
<!---->
<img
@@ -401,7 +80,7 @@ exports[`Design management list item component with notes renders item with mult
exports[`Design management list item component with notes renders item with single comment 1`] = `
<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item"
+ class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
to="[object Object]"
>
<div
@@ -409,9 +88,7 @@ exports[`Design management list item component with notes renders item with sing
>
<!---->
- <gl-intersection-observer-stub
- options="[object Object]"
- >
+ <gl-intersection-observer-stub>
<!---->
<img
diff --git a/spec/frontend/design_management/components/list/item_spec.js b/spec/frontend/design_management/components/list/item_spec.js
index 705b532454f..d1c90bd57b0 100644
--- a/spec/frontend/design_management/components/list/item_spec.js
+++ b/spec/frontend/design_management/components/list/item_spec.js
@@ -1,6 +1,7 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlIcon, GlLoadingIcon, GlIntersectionObserver } from '@gitlab/ui';
import VueRouter from 'vue-router';
+import Icon from '~/vue_shared/components/icon.vue';
import Item from '~/design_management/components/list/item.vue';
const localVue = createLocalVue();
@@ -18,6 +19,10 @@ const DESIGN_VERSION_EVENT = {
describe('Design management list item component', () => {
let wrapper;
+ const findDesignEvent = () => wrapper.find('[data-testid="designEvent"]');
+ const findEventIcon = () => findDesignEvent().find(Icon);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
function createComponent({
notesCount = 0,
event = DESIGN_VERSION_EVENT.NO_CHANGE,
@@ -134,35 +139,31 @@ describe('Design management list item component', () => {
});
});
- describe('with no notes', () => {
- it('renders item with no status icon for none event', () => {
- createComponent();
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders item with correct status icon for modification event', () => {
- createComponent({ event: DESIGN_VERSION_EVENT.MODIFICATION });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders item with correct status icon for deletion event', () => {
- createComponent({ event: DESIGN_VERSION_EVENT.DELETION });
+ it('renders loading spinner when isUploading is true', () => {
+ createComponent({ isUploading: true });
- expect(wrapper.element).toMatchSnapshot();
- });
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
- it('renders item with correct status icon for creation event', () => {
- createComponent({ event: DESIGN_VERSION_EVENT.CREATION });
+ it('renders item with no status icon for none event', () => {
+ createComponent();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders loading spinner when isUploading is true', () => {
- createComponent({ isUploading: true });
+ expect(findDesignEvent().exists()).toBe(false);
+ });
- expect(wrapper.element).toMatchSnapshot();
+ describe('with associated event', () => {
+ it.each`
+ event | icon | className
+ ${DESIGN_VERSION_EVENT.MODIFICATION} | ${'file-modified-solid'} | ${'text-primary-500'}
+ ${DESIGN_VERSION_EVENT.DELETION} | ${'file-deletion-solid'} | ${'text-danger-500'}
+ ${DESIGN_VERSION_EVENT.CREATION} | ${'file-addition-solid'} | ${'text-success-500'}
+ `('renders item with correct status icon for $event event', ({ event, icon, className }) => {
+ createComponent({ event });
+ const eventIcon = findEventIcon();
+
+ expect(eventIcon.exists()).toBe(true);
+ expect(eventIcon.props('name')).toBe(icon);
+ expect(eventIcon.classes()).toContain(className);
});
});
});
diff --git a/spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
index 0197b4bff79..a7d6145285c 100644
--- a/spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
@@ -2,28 +2,34 @@
exports[`Design management pagination component hides components when designs are empty 1`] = `<!---->`;
-exports[`Design management pagination component renders pagination buttons 1`] = `
+exports[`Design management pagination component renders navigation buttons 1`] = `
<div
class="d-flex align-items-center"
>
0 of 2
- <div
- class="btn-group ml-3 mr-3"
+ <gl-button-group-stub
+ class="ml-3 mr-3"
>
- <pagination-button-stub
+ <gl-button-stub
+ category="primary"
class="js-previous-design"
- iconname="angle-left"
+ disabled="true"
+ icon="angle-left"
+ size="medium"
title="Go to previous design"
+ variant="default"
/>
- <pagination-button-stub
+ <gl-button-stub
+ category="primary"
class="js-next-design"
- design="[object Object]"
- iconname="angle-right"
+ icon="angle-right"
+ size="medium"
title="Go to next design"
+ variant="default"
/>
- </div>
+ </gl-button-group-stub>
</div>
`;
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
index e55cff8de3d..b286a74ebb8 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
@@ -2,60 +2,60 @@
exports[`Design management toolbar component renders design and updated data 1`] = `
<header
- class="d-flex p-2 bg-white align-items-center js-design-header"
+ class="gl-display-flex gl-align-items-center gl-justify-content-space-between gl-bg-white gl-py-4 gl-pl-4 js-design-header"
>
- <a
- aria-label="Go back to designs"
- class="mr-3 text-plain d-flex justify-content-center align-items-center"
- >
- <icon-stub
- name="close"
- size="18"
- />
- </a>
-
<div
- class="overflow-hidden d-flex align-items-center"
+ class="gl-display-flex gl-align-items-center"
>
- <h2
- class="m-0 str-truncated-100 gl-font-base"
+ <a
+ aria-label="Go back to designs"
+ class="gl-mr-5 gl-display-flex gl-align-items-center gl-justify-content-center text-plain"
+ data-testid="close-design"
>
- test.jpg
- </h2>
+ <gl-icon-stub
+ name="close"
+ size="16"
+ />
+ </a>
- <small
- class="text-secondary"
+ <div
+ class="overflow-hidden d-flex align-items-center"
>
- Updated 1 hour ago by Test Name
- </small>
+ <h2
+ class="m-0 str-truncated-100 gl-font-base"
+ >
+ test.jpg
+ </h2>
+
+ <small
+ class="text-secondary"
+ >
+ Updated 1 hour ago by Test Name
+ </small>
+ </div>
</div>
- <pagination-stub
+ <design-navigation-stub
class="ml-auto flex-shrink-0"
id="1"
/>
- <gl-deprecated-button-stub
- class="mr-2"
+ <gl-button-stub
+ category="primary"
href="/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d"
- size="md"
- variant="secondary"
- >
- <icon-stub
- name="download"
- size="18"
- />
- </gl-deprecated-button-stub>
+ icon="download"
+ size="medium"
+ variant="default"
+ />
<delete-button-stub
+ buttoncategory="secondary"
buttonclass=""
- buttonvariant="danger"
+ buttonicon="archive"
+ buttonsize="medium"
+ buttonvariant="warning"
+ class="gl-ml-3"
hasselecteddesigns="true"
- >
- <icon-stub
- name="remove"
- size="18"
- />
- </delete-button-stub>
+ />
</header>
`;
diff --git a/spec/frontend/design_management_new/components/toolbar/pagination_spec.js b/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
index 45dce15e292..1c6588a9628 100644
--- a/spec/frontend/design_management_new/components/toolbar/pagination_spec.js
+++ b/spec/frontend/design_management/components/toolbar/design_navigation_spec.js
@@ -1,8 +1,8 @@
/* global Mousetrap */
import 'mousetrap';
import { shallowMount } from '@vue/test-utils';
-import Pagination from '~/design_management_new/components/toolbar/pagination.vue';
-import { DESIGN_ROUTE_NAME } from '~/design_management_new/router/constants';
+import DesignNavigation from '~/design_management/components/toolbar/design_navigation.vue';
+import { DESIGN_ROUTE_NAME } from '~/design_management/router/constants';
const push = jest.fn();
const $router = {
@@ -18,7 +18,7 @@ describe('Design management pagination component', () => {
let wrapper;
function createComponent() {
- wrapper = shallowMount(Pagination, {
+ wrapper = shallowMount(DesignNavigation, {
propsData: {
id: '2',
},
@@ -41,7 +41,7 @@ describe('Design management pagination component', () => {
expect(wrapper.element).toMatchSnapshot();
});
- it('renders pagination buttons', () => {
+ it('renders navigation buttons', () => {
wrapper.setData({
designs: [{ id: '1' }, { id: '2' }],
});
diff --git a/spec/frontend/design_management/components/toolbar/index_spec.js b/spec/frontend/design_management/components/toolbar/index_spec.js
index 2910b2f62ba..2914365b0df 100644
--- a/spec/frontend/design_management/components/toolbar/index_spec.js
+++ b/spec/frontend/design_management/components/toolbar/index_spec.js
@@ -1,9 +1,9 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueRouter from 'vue-router';
+import { GlButton } from '@gitlab/ui';
import Toolbar from '~/design_management/components/toolbar/index.vue';
import DeleteButton from '~/design_management/components/delete_button.vue';
import { DESIGNS_ROUTE_NAME } from '~/design_management/router/constants';
-import { GlDeprecatedButton } from '@gitlab/ui';
const localVue = createLocalVue();
localVue.use(VueRouter);
@@ -116,7 +116,7 @@ describe('Design management toolbar component', () => {
});
it('renders download button with correct link', () => {
- expect(wrapper.find(GlDeprecatedButton).attributes('href')).toBe(
+ expect(wrapper.find(GlButton).attributes('href')).toBe(
'/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d',
);
});
diff --git a/spec/frontend/design_management/components/toolbar/pagination_button_spec.js b/spec/frontend/design_management/components/toolbar/pagination_button_spec.js
deleted file mode 100644
index b7df201795b..00000000000
--- a/spec/frontend/design_management/components/toolbar/pagination_button_spec.js
+++ /dev/null
@@ -1,61 +0,0 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import VueRouter from 'vue-router';
-import PaginationButton from '~/design_management/components/toolbar/pagination_button.vue';
-import { DESIGN_ROUTE_NAME } from '~/design_management/router/constants';
-
-const localVue = createLocalVue();
-localVue.use(VueRouter);
-const router = new VueRouter();
-
-describe('Design management pagination button component', () => {
- let wrapper;
-
- function createComponent(design = null) {
- wrapper = shallowMount(PaginationButton, {
- localVue,
- router,
- propsData: {
- design,
- title: 'Test title',
- iconName: 'angle-right',
- },
- stubs: ['router-link'],
- });
- }
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('disables button when no design is passed', () => {
- createComponent();
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders router-link', () => {
- createComponent({ id: '2' });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- describe('designLink', () => {
- it('returns empty link when design is null', () => {
- createComponent();
-
- expect(wrapper.vm.designLink).toEqual({});
- });
-
- it('returns design link', () => {
- createComponent({ id: '2', filename: 'test' });
-
- wrapper.vm.$router.replace('/root/test-project/issues/1/designs/test?version=1');
-
- expect(wrapper.vm.designLink).toEqual({
- name: DESIGN_ROUTE_NAME,
- params: { id: 'test' },
- query: { version: '1' },
- });
- });
- });
-});
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
index 27c0ba589e6..3d7939df28e 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
@@ -4,16 +4,18 @@ exports[`Design management upload button component renders inverted upload desig
<div
isinverted="true"
>
- <gl-deprecated-button-stub
- size="md"
+ <gl-button-stub
+ category="primary"
+ icon=""
+ size="small"
title="Adding a design with the same filename replaces the file in a new version."
- variant="success"
+ variant="default"
>
Upload designs
<!---->
- </gl-deprecated-button-stub>
+ </gl-button-stub>
<input
accept="image/*"
@@ -27,11 +29,13 @@ exports[`Design management upload button component renders inverted upload desig
exports[`Design management upload button component renders loading icon 1`] = `
<div>
- <gl-deprecated-button-stub
+ <gl-button-stub
+ category="primary"
disabled="true"
- size="md"
+ icon=""
+ size="small"
title="Adding a design with the same filename replaces the file in a new version."
- variant="success"
+ variant="default"
>
Upload designs
@@ -43,7 +47,7 @@ exports[`Design management upload button component renders loading icon 1`] = `
label="Loading"
size="sm"
/>
- </gl-deprecated-button-stub>
+ </gl-button-stub>
<input
accept="image/*"
@@ -57,16 +61,18 @@ exports[`Design management upload button component renders loading icon 1`] = `
exports[`Design management upload button component renders upload design button 1`] = `
<div>
- <gl-deprecated-button-stub
- size="md"
+ <gl-button-stub
+ category="primary"
+ icon=""
+ size="small"
title="Adding a design with the same filename replaces the file in a new version."
- variant="success"
+ variant="default"
>
Upload designs
<!---->
- </gl-deprecated-button-stub>
+ </gl-button-stub>
<input
accept="image/*"
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap
index 0737b9729a2..9284099b40d 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_dropzone_spec.js.snap
@@ -5,20 +5,23 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
- class="d-flex-center flex-column text-center"
+ class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ data-testid="dropzone-area"
>
<gl-icon-stub
- class="mb-4"
- name="doc-new"
- size="48"
+ class="gl-mb-2"
+ name="upload"
+ size="24"
/>
- <p>
+ <p
+ class="gl-mb-0"
+ >
<gl-sprintf-stub
- message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+ message="Drop or %{linkStart}upload%{linkEnd} designs to attach"
/>
</p>
</div>
@@ -43,7 +46,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -56,7 +61,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style=""
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
@@ -74,20 +81,23 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
- class="d-flex-center flex-column text-center"
+ class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ data-testid="dropzone-area"
>
<gl-icon-stub
- class="mb-4"
- name="doc-new"
- size="48"
+ class="gl-mb-2"
+ name="upload"
+ size="24"
/>
- <p>
+ <p
+ class="gl-mb-0"
+ >
<gl-sprintf-stub
- message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+ message="Drop or %{linkStart}upload%{linkEnd} designs to attach"
/>
</p>
</div>
@@ -112,7 +122,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -125,7 +137,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style=""
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
@@ -143,20 +157,23 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
- class="d-flex-center flex-column text-center"
+ class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ data-testid="dropzone-area"
>
<gl-icon-stub
- class="mb-4"
- name="doc-new"
- size="48"
+ class="gl-mb-2"
+ name="upload"
+ size="24"
/>
- <p>
+ <p
+ class="gl-mb-0"
+ >
<gl-sprintf-stub
- message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+ message="Drop or %{linkStart}upload%{linkEnd} designs to attach"
/>
</p>
</div>
@@ -180,7 +197,9 @@ exports[`Design management dropzone component when dragging renders correct temp
<div
class="mw-50 text-center"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -193,7 +212,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
@@ -211,20 +232,23 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
- class="d-flex-center flex-column text-center"
+ class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ data-testid="dropzone-area"
>
<gl-icon-stub
- class="mb-4"
- name="doc-new"
- size="48"
+ class="gl-mb-2"
+ name="upload"
+ size="24"
/>
- <p>
+ <p
+ class="gl-mb-0"
+ >
<gl-sprintf-stub
- message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+ message="Drop or %{linkStart}upload%{linkEnd} designs to attach"
/>
</p>
</div>
@@ -248,7 +272,9 @@ exports[`Design management dropzone component when dragging renders correct temp
<div
class="mw-50 text-center"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -261,7 +287,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
@@ -279,20 +307,23 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
- class="d-flex-center flex-column text-center"
+ class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ data-testid="dropzone-area"
>
<gl-icon-stub
- class="mb-4"
- name="doc-new"
- size="48"
+ class="gl-mb-2"
+ name="upload"
+ size="24"
/>
- <p>
+ <p
+ class="gl-mb-0"
+ >
<gl-sprintf-stub
- message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+ message="Drop or %{linkStart}upload%{linkEnd} designs to attach"
/>
</p>
</div>
@@ -316,7 +347,9 @@ exports[`Design management dropzone component when dragging renders correct temp
<div
class="mw-50 text-center"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -329,7 +362,9 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
@@ -347,20 +382,23 @@ exports[`Design management dropzone component when no slot provided renders defa
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
>
<div
- class="d-flex-center flex-column text-center"
+ class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
+ data-testid="dropzone-area"
>
<gl-icon-stub
- class="mb-4"
- name="doc-new"
- size="48"
+ class="gl-mb-2"
+ name="upload"
+ size="24"
/>
- <p>
+ <p
+ class="gl-mb-0"
+ >
<gl-sprintf-stub
- message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
+ message="Drop or %{linkStart}upload%{linkEnd} designs to attach"
/>
</p>
</div>
@@ -384,7 +422,9 @@ exports[`Design management dropzone component when no slot provided renders defa
<div
class="mw-50 text-center"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -397,7 +437,9 @@ exports[`Design management dropzone component when no slot provided renders defa
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
@@ -428,7 +470,9 @@ exports[`Design management dropzone component when slot provided renders dropzon
<div
class="mw-50 text-center"
>
- <h3>
+ <h3
+ class=""
+ >
Oh no!
</h3>
@@ -441,7 +485,9 @@ exports[`Design management dropzone component when slot provided renders dropzon
class="mw-50 text-center"
style="display: none;"
>
- <h3>
+ <h3
+ class=""
+ >
Incoming!
</h3>
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
index 00f1a40dfb2..d6fd09eb698 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
@@ -1,111 +1,77 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design management design version dropdown component renders design version dropdown button 1`] = `
-<gl-dropdown-stub
- class="design-version-dropdown"
+<gl-new-dropdown-stub
+ category="tertiary"
+ headertext=""
issueiid=""
projectpath=""
- text="Showing Latest Version"
- variant="link"
+ size="small"
+ text="Showing latest version"
+ variant="default"
>
- <gl-dropdown-item-stub>
- <router-link-stub
- class="d-flex js-version-link"
- to="[object Object]"
- >
- <div
- class="flex-grow-1 ml-2"
- >
- <div>
- <strong>
- Version 2
-
- <span>
- (latest)
- </span>
- </strong>
- </div>
- </div>
-
- <i
- class="fa fa-check pull-right"
- />
- </router-link-stub>
- </gl-dropdown-item-stub>
- <gl-dropdown-item-stub>
- <router-link-stub
- class="d-flex js-version-link"
- to="[object Object]"
- >
- <div
- class="flex-grow-1 ml-2"
- >
- <div>
- <strong>
- Version 1
-
- <!---->
- </strong>
- </div>
- </div>
-
- <!---->
- </router-link-stub>
- </gl-dropdown-item-stub>
-</gl-dropdown-stub>
+ <gl-new-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischecked="true"
+ ischeckitem="true"
+ secondarytext=""
+ >
+ Version
+ 2
+ (latest)
+ </gl-new-dropdown-item-stub>
+ <gl-new-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischeckitem="true"
+ secondarytext=""
+ >
+ Version
+ 1
+
+ </gl-new-dropdown-item-stub>
+</gl-new-dropdown-stub>
`;
exports[`Design management design version dropdown component renders design version list 1`] = `
-<gl-dropdown-stub
- class="design-version-dropdown"
+<gl-new-dropdown-stub
+ category="tertiary"
+ headertext=""
issueiid=""
projectpath=""
- text="Showing Latest Version"
- variant="link"
+ size="small"
+ text="Showing latest version"
+ variant="default"
>
- <gl-dropdown-item-stub>
- <router-link-stub
- class="d-flex js-version-link"
- to="[object Object]"
- >
- <div
- class="flex-grow-1 ml-2"
- >
- <div>
- <strong>
- Version 2
-
- <span>
- (latest)
- </span>
- </strong>
- </div>
- </div>
-
- <i
- class="fa fa-check pull-right"
- />
- </router-link-stub>
- </gl-dropdown-item-stub>
- <gl-dropdown-item-stub>
- <router-link-stub
- class="d-flex js-version-link"
- to="[object Object]"
- >
- <div
- class="flex-grow-1 ml-2"
- >
- <div>
- <strong>
- Version 1
-
- <!---->
- </strong>
- </div>
- </div>
-
- <!---->
- </router-link-stub>
- </gl-dropdown-item-stub>
-</gl-dropdown-stub>
+ <gl-new-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischecked="true"
+ ischeckitem="true"
+ secondarytext=""
+ >
+ Version
+ 2
+ (latest)
+ </gl-new-dropdown-item-stub>
+ <gl-new-dropdown-item-stub
+ avatarurl=""
+ iconcolor=""
+ iconname=""
+ iconrightname=""
+ ischeckitem="true"
+ secondarytext=""
+ >
+ Version
+ 1
+
+ </gl-new-dropdown-item-stub>
+</gl-new-dropdown-stub>
`;
diff --git a/spec/frontend/design_management/components/upload/design_dropzone_spec.js b/spec/frontend/design_management/components/upload/design_dropzone_spec.js
index 9b86b5b2878..bf97399368f 100644
--- a/spec/frontend/design_management/components/upload/design_dropzone_spec.js
+++ b/spec/frontend/design_management/components/upload/design_dropzone_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
import DesignDropzone from '~/design_management/components/upload/design_dropzone.vue';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
@@ -12,10 +13,16 @@ describe('Design management dropzone component', () => {
};
const findDropzoneCard = () => wrapper.find('.design-dropzone-card');
+ const findDropzoneArea = () => wrapper.find('[data-testid="dropzone-area"]');
+ const findIcon = () => wrapper.find(GlIcon);
- function createComponent({ slots = {}, data = {} } = {}) {
+ function createComponent({ slots = {}, data = {}, props = {} } = {}) {
wrapper = shallowMount(DesignDropzone, {
slots,
+ propsData: {
+ hasDesigns: true,
+ ...props,
+ },
data() {
return data;
},
@@ -129,4 +136,18 @@ describe('Design management dropzone component', () => {
});
});
});
+
+ it('applies correct classes when there are no designs or no design saving loader', () => {
+ createComponent({ props: { hasDesigns: false } });
+ expect(findDropzoneArea().classes()).not.toContain('gl-flex-direction-column');
+ expect(findIcon().classes()).toEqual(['gl-mr-3', 'gl-text-gray-500']);
+ expect(findIcon().props('size')).toBe(16);
+ });
+
+ it('applies correct classes when there are designs or design saving loader', () => {
+ createComponent({ props: { hasDesigns: true } });
+ expect(findDropzoneArea().classes()).toContain('gl-flex-direction-column');
+ expect(findIcon().classes()).toEqual(['gl-mb-2']);
+ expect(findIcon().props('size')).toBe(24);
+ });
});
diff --git a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
index 7521b9fad2a..f4206cdaeb3 100644
--- a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
+++ b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
+import { GlNewDropdown, GlNewDropdownItem, GlSprintf } from '@gitlab/ui';
import DesignVersionDropdown from '~/design_management/components/upload/design_version_dropdown.vue';
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import mockAllVersions from './mock_data/all_versions';
const LATEST_VERSION_ID = 3;
@@ -30,7 +30,7 @@ describe('Design management design version dropdown component', () => {
mocks: {
$route,
},
- stubs: ['router-link'],
+ stubs: { GlSprintf },
});
wrapper.setData({
@@ -42,7 +42,7 @@ describe('Design management design version dropdown component', () => {
wrapper.destroy();
});
- const findVersionLink = index => wrapper.findAll('.js-version-link').at(index);
+ const findVersionLink = index => wrapper.findAll(GlNewDropdownItem).at(index);
it('renders design version dropdown button', () => {
createComponent();
@@ -75,7 +75,7 @@ describe('Design management design version dropdown component', () => {
createComponent();
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlDropdown).attributes('text')).toBe('Showing Latest Version');
+ expect(wrapper.find(GlNewDropdown).attributes('text')).toBe('Showing latest version');
});
});
@@ -83,7 +83,7 @@ describe('Design management design version dropdown component', () => {
createComponent({ maxVersions: 1 });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlDropdown).attributes('text')).toBe('Showing Latest Version');
+ expect(wrapper.find(GlNewDropdown).attributes('text')).toBe('Showing latest version');
});
});
@@ -91,7 +91,7 @@ describe('Design management design version dropdown component', () => {
createComponent({ $route: designRouteFactory(PREVIOUS_VERSION_ID) });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlDropdown).attributes('text')).toBe(`Showing Version #1`);
+ expect(wrapper.find(GlNewDropdown).attributes('text')).toBe(`Showing version #1`);
});
});
@@ -99,7 +99,7 @@ describe('Design management design version dropdown component', () => {
createComponent({ $route: designRouteFactory(LATEST_VERSION_ID) });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlDropdown).attributes('text')).toBe('Showing Latest Version');
+ expect(wrapper.find(GlNewDropdown).attributes('text')).toBe('Showing latest version');
});
});
@@ -107,7 +107,7 @@ describe('Design management design version dropdown component', () => {
createComponent();
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.findAll(GlDropdownItem)).toHaveLength(wrapper.vm.allVersions.length);
+ expect(wrapper.findAll(GlNewDropdownItem)).toHaveLength(wrapper.vm.allVersions.length);
});
});
});
diff --git a/spec/frontend/design_management/components/upload/mock_data/all_versions.js b/spec/frontend/design_management/components/upload/mock_data/all_versions.js
index e76bbd261bd..237e1654f9b 100644
--- a/spec/frontend/design_management/components/upload/mock_data/all_versions.js
+++ b/spec/frontend/design_management/components/upload/mock_data/all_versions.js
@@ -1,14 +1,10 @@
export default [
{
- node: {
- id: 'gid://gitlab/DesignManagement::Version/3',
- sha: '0945756378e0b1588b9dd40d5a6b99e8b7198f55',
- },
+ id: 'gid://gitlab/DesignManagement::Version/3',
+ sha: '0945756378e0b1588b9dd40d5a6b99e8b7198f55',
},
{
- node: {
- id: 'gid://gitlab/DesignManagement::Version/2',
- sha: '5b063fef0cd7213b312db65b30e24f057df21b20',
- },
+ id: 'gid://gitlab/DesignManagement::Version/2',
+ sha: '5b063fef0cd7213b312db65b30e24f057df21b20',
},
];
diff --git a/spec/frontend/design_management/mock_data/all_versions.js b/spec/frontend/design_management/mock_data/all_versions.js
index c389fdb8747..2b216574e27 100644
--- a/spec/frontend/design_management/mock_data/all_versions.js
+++ b/spec/frontend/design_management/mock_data/all_versions.js
@@ -1,8 +1,6 @@
export default [
{
- node: {
- id: 'gid://gitlab/DesignManagement::Version/1',
- sha: 'b389071a06c153509e11da1f582005b316667001',
- },
+ id: 'gid://gitlab/DesignManagement::Version/1',
+ sha: 'b389071a06c153509e11da1f582005b316667001',
},
];
diff --git a/spec/frontend/design_management/mock_data/apollo_mock.js b/spec/frontend/design_management/mock_data/apollo_mock.js
new file mode 100644
index 00000000000..5e2df3877a5
--- /dev/null
+++ b/spec/frontend/design_management/mock_data/apollo_mock.js
@@ -0,0 +1,106 @@
+export const designListQueryResponse = {
+ data: {
+ project: {
+ id: '1',
+ issue: {
+ designCollection: {
+ designs: {
+ nodes: [
+ {
+ id: '1',
+ event: 'NONE',
+ filename: 'fox_1.jpg',
+ notesCount: 3,
+ image: 'image-1',
+ imageV432x230: 'image-1',
+ },
+ {
+ id: '2',
+ event: 'NONE',
+ filename: 'fox_2.jpg',
+ notesCount: 2,
+ image: 'image-2',
+ imageV432x230: 'image-2',
+ },
+ {
+ id: '3',
+ event: 'NONE',
+ filename: 'fox_3.jpg',
+ notesCount: 1,
+ image: 'image-3',
+ imageV432x230: 'image-3',
+ },
+ ],
+ },
+ versions: {
+ nodes: [],
+ },
+ },
+ },
+ },
+ },
+};
+
+export const permissionsQueryResponse = {
+ data: {
+ project: {
+ id: '1',
+ issue: {
+ userPermissions: { createDesign: true },
+ },
+ },
+ },
+};
+
+export const reorderedDesigns = [
+ {
+ id: '2',
+ event: 'NONE',
+ filename: 'fox_2.jpg',
+ notesCount: 2,
+ image: 'image-2',
+ imageV432x230: 'image-2',
+ },
+ {
+ id: '1',
+ event: 'NONE',
+ filename: 'fox_1.jpg',
+ notesCount: 3,
+ image: 'image-1',
+ imageV432x230: 'image-1',
+ },
+ {
+ id: '3',
+ event: 'NONE',
+ filename: 'fox_3.jpg',
+ notesCount: 1,
+ image: 'image-3',
+ imageV432x230: 'image-3',
+ },
+];
+
+export const moveDesignMutationResponse = {
+ data: {
+ designManagementMove: {
+ designCollection: {
+ designs: {
+ nodes: [...reorderedDesigns],
+ },
+ },
+ errors: [],
+ },
+ },
+};
+
+export const moveDesignMutationResponseWithErrors = {
+ data: {
+ designManagementMove: {
+ designCollection: {
+ designs: {
+ nodes: [...reorderedDesigns],
+ },
+ },
+ errors: ['Houston, we have a problem'],
+ },
+ },
+};
diff --git a/spec/frontend/design_management/mock_data/design.js b/spec/frontend/design_management/mock_data/design.js
index 675198b9408..72be33fef1d 100644
--- a/spec/frontend/design_management/mock_data/design.js
+++ b/spec/frontend/design_management/mock_data/design.js
@@ -12,14 +12,12 @@ export default {
webPath: 'full-issue-path',
webUrl: 'full-issue-url',
participants: {
- edges: [
+ nodes: [
{
- node: {
- name: 'Administrator',
- username: 'root',
- webUrl: 'link-to-author',
- avatarUrl: 'link-to-avatar',
- },
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'link-to-author',
+ avatarUrl: 'link-to-avatar',
},
],
},
diff --git a/spec/frontend/design_management/mock_data/designs.js b/spec/frontend/design_management/mock_data/designs.js
index 07f5c1b7457..98a24081ae6 100644
--- a/spec/frontend/design_management/mock_data/designs.js
+++ b/spec/frontend/design_management/mock_data/designs.js
@@ -5,11 +5,7 @@ export default {
issue: {
designCollection: {
designs: {
- edges: [
- {
- node: design,
- },
- ],
+ nodes: [design],
},
},
},
diff --git a/spec/frontend/design_management/mock_data/no_designs.js b/spec/frontend/design_management/mock_data/no_designs.js
index 9db0ffcade2..0ccb83492fc 100644
--- a/spec/frontend/design_management/mock_data/no_designs.js
+++ b/spec/frontend/design_management/mock_data/no_designs.js
@@ -3,7 +3,7 @@ export default {
issue: {
designCollection: {
designs: {
- edges: [],
+ nodes: [],
},
},
},
diff --git a/spec/frontend/design_management/mock_data/versions_list.js b/spec/frontend/design_management/mock_data/versions_list.js
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/spec/frontend/design_management/mock_data/versions_list.js
diff --git a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
index 3ba63fd14f0..3881b2d7679 100644
--- a/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/__snapshots__/index_spec.js.snap
@@ -1,7 +1,10 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design management index page designs does not render toolbar when there is no permission 1`] = `
-<div>
+<div
+ class="gl-mt-5"
+ data-testid="designs-root"
+>
<!---->
<div
@@ -11,18 +14,22 @@ exports[`Design management index page designs does not render toolbar when there
class="list-unstyled row"
>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="gl-flex-direction-column col-md-6 col-lg-3 gl-mb-3"
+ data-testid="design-dropzone-wrapper"
>
<design-dropzone-stub
- class="design-list-item"
+ class="design-list-item design-list-item-new"
+ hasdesigns="true"
/>
</li>
-
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
- <design-dropzone-stub>
+ <design-dropzone-stub
+ hasdesigns="true"
+ >
<design-stub
+ class="gl-bg-white"
event="NONE"
filename="design-1-name"
id="design-1"
@@ -34,10 +41,13 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
</li>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
- <design-dropzone-stub>
+ <design-dropzone-stub
+ hasdesigns="true"
+ >
<design-stub
+ class="gl-bg-white"
event="NONE"
filename="design-2-name"
id="design-2"
@@ -49,10 +59,13 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
</li>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
- <design-dropzone-stub>
+ <design-dropzone-stub
+ hasdesigns="true"
+ >
<design-stub
+ class="gl-bg-white"
event="NONE"
filename="design-3-name"
id="design-3"
@@ -73,35 +86,50 @@ exports[`Design management index page designs does not render toolbar when there
`;
exports[`Design management index page designs renders designs list and header with upload button 1`] = `
-<div>
+<div
+ class="gl-mt-5"
+ data-testid="designs-root"
+>
<header
class="row-content-block border-top-0 p-2 d-flex"
>
<div
- class="d-flex justify-content-between align-items-center w-100"
+ class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-w-full"
>
- <design-version-dropdown-stub />
+ <div>
+ <span
+ class="gl-font-weight-bold gl-mr-3"
+ >
+ Designs
+ </span>
+
+ <design-version-dropdown-stub />
+ </div>
<div
- class="qa-selector-toolbar d-flex"
+ class="qa-selector-toolbar gl-display-flex gl-align-items-center"
>
- <gl-deprecated-button-stub
- class="mr-2 js-select-all"
- size="md"
+ <gl-button-stub
+ category="primary"
+ class="gl-mr-3 js-select-all"
+ icon=""
+ size="small"
variant="link"
>
Select all
- </gl-deprecated-button-stub>
+
+ </gl-button-stub>
<div>
<delete-button-stub
- buttonclass="btn-danger btn-inverted mr-2"
- buttonvariant=""
+ buttoncategory="secondary"
+ buttonclass="gl-mr-3"
+ buttonsize="small"
+ buttonvariant="warning"
>
- Delete selected
-
- <!---->
+ Archive selected
+
</delete-button-stub>
</div>
@@ -117,18 +145,22 @@ exports[`Design management index page designs renders designs list and header wi
class="list-unstyled row"
>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="gl-flex-direction-column col-md-6 col-lg-3 gl-mb-3"
+ data-testid="design-dropzone-wrapper"
>
<design-dropzone-stub
- class="design-list-item"
+ class="design-list-item design-list-item-new"
+ hasdesigns="true"
/>
</li>
-
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
- <design-dropzone-stub>
+ <design-dropzone-stub
+ hasdesigns="true"
+ >
<design-stub
+ class="gl-bg-white"
event="NONE"
filename="design-1-name"
id="design-1"
@@ -143,10 +175,13 @@ exports[`Design management index page designs renders designs list and header wi
/>
</li>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
- <design-dropzone-stub>
+ <design-dropzone-stub
+ hasdesigns="true"
+ >
<design-stub
+ class="gl-bg-white"
event="NONE"
filename="design-2-name"
id="design-2"
@@ -161,10 +196,13 @@ exports[`Design management index page designs renders designs list and header wi
/>
</li>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-md-6 col-lg-3 gl-mb-3 gl-bg-transparent gl-shadow-none js-design-tile"
>
- <design-dropzone-stub>
+ <design-dropzone-stub
+ hasdesigns="true"
+ >
<design-stub
+ class="gl-bg-white"
event="NONE"
filename="design-3-name"
id="design-3"
@@ -188,7 +226,10 @@ exports[`Design management index page designs renders designs list and header wi
`;
exports[`Design management index page designs renders error 1`] = `
-<div>
+<div
+ class="gl-mt-5"
+ data-testid="designs-root"
+>
<!---->
<div
@@ -216,7 +257,10 @@ exports[`Design management index page designs renders error 1`] = `
`;
exports[`Design management index page designs renders loading icon 1`] = `
-<div>
+<div
+ class="gl-mt-5"
+ data-testid="designs-root"
+>
<!---->
<div
@@ -235,8 +279,11 @@ exports[`Design management index page designs renders loading icon 1`] = `
</div>
`;
-exports[`Design management index page when has no designs renders empty text 1`] = `
-<div>
+exports[`Design management index page when has no designs renders design dropzone 1`] = `
+<div
+ class="gl-mt-5"
+ data-testid="designs-root"
+>
<!---->
<div
@@ -246,13 +293,13 @@ exports[`Design management index page when has no designs renders empty text 1`]
class="list-unstyled row"
>
<li
- class="col-md-6 col-lg-4 mb-3"
+ class="col-12"
+ data-testid="design-dropzone-wrapper"
>
<design-dropzone-stub
- class="design-list-item"
+ class=""
/>
</li>
-
</ol>
</div>
diff --git a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
index 65c4811536e..823294efc38 100644
--- a/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/pages/design/__snapshots__/index_spec.js.snap
@@ -10,7 +10,7 @@ exports[`Design management design index page renders design index 1`] = `
<design-destroyer-stub
filenames="test.jpg"
iid="1"
- projectpath=""
+ project-path="project-path"
/>
<!---->
@@ -41,7 +41,7 @@ exports[`Design management design index page renders design index 1`] = `
</h2>
<a
- class="gl-text-gray-600 gl-text-decoration-none gl-mb-6 gl-display-block"
+ class="gl-text-gray-400 gl-text-decoration-none gl-mb-6 gl-display-block"
href="full-issue-url"
>
ull-issue-path
@@ -60,13 +60,13 @@ exports[`Design management design index page renders design index 1`] = `
designid="test"
discussion="[object Object]"
discussionwithopenform=""
- markdownpreviewpath="//preview_markdown?target_type=Issue"
+ markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
noteableid="design-id"
/>
<gl-button-stub
- category="tertiary"
- class="link-inherit-color gl-text-black-normal gl-text-decoration-none gl-font-weight-bold gl-mb-4"
+ category="primary"
+ class="link-inherit-color gl-text-body gl-text-decoration-none gl-font-weight-bold gl-mb-4"
data-testid="resolved-comments"
icon="chevron-right"
id="resolved-comments"
@@ -108,7 +108,7 @@ exports[`Design management design index page renders design index 1`] = `
designid="test"
discussion="[object Object]"
discussionwithopenform=""
- markdownpreviewpath="//preview_markdown?target_type=Issue"
+ markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
noteableid="design-id"
/>
</gl-collapse-stub>
@@ -140,7 +140,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
<design-destroyer-stub
filenames="test.jpg"
iid="1"
- projectpath=""
+ project-path="project-path"
/>
<div
@@ -188,7 +188,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
</h2>
<a
- class="gl-text-gray-600 gl-text-decoration-none gl-mb-6 gl-display-block"
+ class="gl-text-gray-400 gl-text-decoration-none gl-mb-6 gl-display-block"
href="full-issue-url"
>
ull-issue-path
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 82b607eb77d..369c8667f4d 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -2,7 +2,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueRouter from 'vue-router';
import { GlAlert } from '@gitlab/ui';
import { ApolloMutation } from 'vue-apollo';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import DesignIndex from '~/design_management/pages/design/index.vue';
import DesignSidebar from '~/design_management/components/design_sidebar.vue';
import DesignPresentation from '~/design_management/components/design_presentation.vue';
@@ -95,9 +95,12 @@ describe('Design management design index page', () => {
DesignSidebar,
DesignReplyForm,
},
+ provide: {
+ issueIid: '1',
+ projectPath: 'project-path',
+ },
data() {
return {
- issueIid: '1',
activeDiscussion: {
id: null,
source: null,
@@ -149,7 +152,7 @@ describe('Design management design index page', () => {
expect(findSidebar().props()).toEqual({
design,
- markdownPreviewPath: '//preview_markdown?target_type=Issue',
+ markdownPreviewPath: '/project-path/preview_markdown?target_type=Issue',
resolvedDiscussionsExpanded: false,
});
});
diff --git a/spec/frontend/design_management/pages/index_apollo_spec.js b/spec/frontend/design_management/pages/index_apollo_spec.js
new file mode 100644
index 00000000000..3ea711c2cfa
--- /dev/null
+++ b/spec/frontend/design_management/pages/index_apollo_spec.js
@@ -0,0 +1,162 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { createMockClient } from 'mock-apollo-client';
+import VueApollo from 'vue-apollo';
+import VueRouter from 'vue-router';
+import VueDraggable from 'vuedraggable';
+import { InMemoryCache } from 'apollo-cache-inmemory';
+import Design from '~/design_management/components/list/item.vue';
+import createRouter from '~/design_management/router';
+import getDesignListQuery from '~/design_management/graphql/queries/get_design_list.query.graphql';
+import permissionsQuery from '~/design_management/graphql/queries/design_permissions.query.graphql';
+import moveDesignMutation from '~/design_management/graphql/mutations/move_design.mutation.graphql';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import Index from '~/design_management/pages/index.vue';
+import {
+ designListQueryResponse,
+ permissionsQueryResponse,
+ moveDesignMutationResponse,
+ reorderedDesigns,
+ moveDesignMutationResponseWithErrors,
+} from '../mock_data/apollo_mock';
+
+jest.mock('~/flash');
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+const router = createRouter();
+localVue.use(VueRouter);
+
+const designToMove = {
+ __typename: 'Design',
+ id: '2',
+ event: 'NONE',
+ filename: 'fox_2.jpg',
+ notesCount: 2,
+ image: 'image-2',
+ imageV432x230: 'image-2',
+};
+
+describe('Design management index page with Apollo mock', () => {
+ let wrapper;
+ let mockClient;
+ let apolloProvider;
+ let moveDesignHandler;
+
+ async function moveDesigns(localWrapper) {
+ await jest.runOnlyPendingTimers();
+ await localWrapper.vm.$nextTick();
+
+ localWrapper.find(VueDraggable).vm.$emit('input', reorderedDesigns);
+ localWrapper.find(VueDraggable).vm.$emit('change', {
+ moved: {
+ newIndex: 0,
+ element: designToMove,
+ },
+ });
+ }
+
+ const fragmentMatcher = { match: () => true };
+
+ const cache = new InMemoryCache({
+ fragmentMatcher,
+ addTypename: false,
+ });
+
+ const findDesigns = () => wrapper.findAll(Design);
+
+ function createComponent({
+ moveHandler = jest.fn().mockResolvedValue(moveDesignMutationResponse),
+ }) {
+ mockClient = createMockClient({ cache });
+
+ mockClient.setRequestHandler(
+ getDesignListQuery,
+ jest.fn().mockResolvedValue(designListQueryResponse),
+ );
+
+ mockClient.setRequestHandler(
+ permissionsQuery,
+ jest.fn().mockResolvedValue(permissionsQueryResponse),
+ );
+
+ moveDesignHandler = moveHandler;
+
+ mockClient.setRequestHandler(moveDesignMutation, moveDesignHandler);
+
+ apolloProvider = new VueApollo({
+ defaultClient: mockClient,
+ });
+
+ wrapper = shallowMount(Index, {
+ localVue,
+ apolloProvider,
+ router,
+ stubs: { VueDraggable },
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ mockClient = null;
+ apolloProvider = null;
+ });
+
+ it('has a design with id 1 as a first one', async () => {
+ createComponent({});
+
+ await jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ expect(findDesigns()).toHaveLength(3);
+ expect(
+ findDesigns()
+ .at(0)
+ .props('id'),
+ ).toBe('1');
+ });
+
+ it('calls a mutation with correct parameters and reorders designs', async () => {
+ createComponent({});
+
+ await moveDesigns(wrapper);
+
+ expect(moveDesignHandler).toHaveBeenCalled();
+
+ await wrapper.vm.$nextTick();
+
+ expect(
+ findDesigns()
+ .at(0)
+ .props('id'),
+ ).toBe('2');
+ });
+
+ it('displays flash if mutation had a recoverable error', async () => {
+ createComponent({
+ moveHandler: jest.fn().mockResolvedValue(moveDesignMutationResponseWithErrors),
+ });
+
+ await moveDesigns(wrapper);
+
+ await wrapper.vm.$nextTick();
+
+ expect(createFlash).toHaveBeenCalledWith('Houston, we have a problem');
+ });
+
+ it('displays flash if mutation had a non-recoverable error', async () => {
+ createComponent({
+ moveHandler: jest.fn().mockRejectedValue('Error'),
+ });
+
+ await moveDesigns(wrapper);
+
+ await jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ 'Something went wrong when reordering designs. Please try again',
+ );
+ });
+});
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index d3761bf09e9..093fa155d2e 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -1,5 +1,6 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { ApolloMutation } from 'vue-apollo';
+import VueDraggable from 'vuedraggable';
import VueRouter from 'vue-router';
import { GlEmptyState } from '@gitlab/ui';
import Index from '~/design_management/pages/index.vue';
@@ -12,7 +13,7 @@ import {
EXISTING_DESIGN_DROP_MANY_FILES_MESSAGE,
EXISTING_DESIGN_DROP_INVALID_FILENAME_MESSAGE,
} from '~/design_management/utils/error_messages';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import createRouter from '~/design_management/router';
import * as utils from '~/design_management/utils/design_management_utils';
import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management/constants';
@@ -25,6 +26,9 @@ const mockPageEl = {
};
jest.spyOn(utils, 'getPageLayoutElement').mockReturnValue(mockPageEl);
+const scrollIntoViewMock = jest.fn();
+HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
+
const localVue = createLocalVue();
const router = createRouter();
localVue.use(VueRouter);
@@ -54,9 +58,7 @@ const mockDesigns = [
];
const mockVersion = {
- node: {
- id: 'gid://gitlab/DesignManagement::Version/1',
- },
+ id: 'gid://gitlab/DesignManagement::Version/1',
};
describe('Design management index page', () => {
@@ -68,7 +70,10 @@ describe('Design management index page', () => {
const findToolbar = () => wrapper.find('.qa-selector-toolbar');
const findDeleteButton = () => wrapper.find(DeleteButton);
const findDropzone = () => wrapper.findAll(DesignDropzone).at(0);
+ const dropzoneClasses = () => findDropzone().classes();
+ const findDropzoneWrapper = () => wrapper.find('[data-testid="design-dropzone-wrapper"]');
const findFirstDropzoneWithDesign = () => wrapper.findAll(DesignDropzone).at(1);
+ const findDesignsWrapper = () => wrapper.find('[data-testid="designs-root"]');
function createComponent({
loading = false,
@@ -92,19 +97,23 @@ describe('Design management index page', () => {
};
wrapper = shallowMount(Index, {
+ data() {
+ return {
+ designs,
+ allVersions,
+ permissions: {
+ createDesign,
+ },
+ };
+ },
mocks: { $apollo },
localVue,
router,
- stubs: { DesignDestroyer, ApolloMutation, ...stubs },
+ stubs: { DesignDestroyer, ApolloMutation, VueDraggable, ...stubs },
attachToDocument: true,
- });
-
- wrapper.setData({
- designs,
- allVersions,
- issueIid: '1',
- permissions: {
- createDesign,
+ provide: {
+ projectPath: 'project-path',
+ issueIid: '1',
},
});
}
@@ -117,9 +126,7 @@ describe('Design management index page', () => {
it('renders loading icon', () => {
createComponent({ loading: true });
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.element).toMatchSnapshot();
- });
+ expect(wrapper.element).toMatchSnapshot();
});
it('renders error', () => {
@@ -135,25 +142,35 @@ describe('Design management index page', () => {
it('renders a toolbar with buttons when there are designs', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
- return wrapper.vm.$nextTick().then(() => {
- expect(findToolbar().exists()).toBe(true);
- });
+ expect(findToolbar().exists()).toBe(true);
});
it('renders designs list and header with upload button', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.element).toMatchSnapshot();
- });
+ expect(wrapper.element).toMatchSnapshot();
});
it('does not render toolbar when there is no permission', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion], createDesign: false });
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.element).toMatchSnapshot();
- });
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('has correct classes applied to design dropzone', () => {
+ createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ expect(dropzoneClasses()).toContain('design-list-item');
+ expect(dropzoneClasses()).toContain('design-list-item-new');
+ });
+
+ it('has correct classes applied to dropzone wrapper', () => {
+ createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
+ expect(findDropzoneWrapper().classes()).toEqual([
+ 'gl-flex-direction-column',
+ 'col-md-6',
+ 'col-lg-3',
+ 'gl-mb-3',
+ ]);
});
});
@@ -162,11 +179,20 @@ describe('Design management index page', () => {
createComponent();
});
- it('renders empty text', () =>
+ it('renders design dropzone', () =>
wrapper.vm.$nextTick().then(() => {
expect(wrapper.element).toMatchSnapshot();
}));
+ it('has correct classes applied to design dropzone', () => {
+ expect(dropzoneClasses()).not.toContain('design-list-item');
+ expect(dropzoneClasses()).not.toContain('design-list-item-new');
+ });
+
+ it('has correct classes applied to dropzone wrapper', () => {
+ expect(findDropzoneWrapper().classes()).toEqual(['col-12']);
+ });
+
it('does not render a toolbar with buttons', () =>
wrapper.vm.$nextTick().then(() => {
expect(findToolbar().exists()).toBe(false);
@@ -185,7 +211,7 @@ describe('Design management index page', () => {
mutation: uploadDesignQuery,
variables: {
files: [{ name: 'test' }],
- projectPath: '',
+ projectPath: 'project-path',
iid: '1',
},
optimisticResponse: {
@@ -214,13 +240,10 @@ describe('Design management index page', () => {
},
versions: {
__typename: 'DesignVersionConnection',
- edges: {
- __typename: 'DesignVersionEdge',
- node: {
- __typename: 'DesignVersion',
- id: expect.anything(),
- sha: expect.anything(),
- },
+ nodes: {
+ __typename: 'DesignVersion',
+ id: expect.anything(),
+ sha: expect.anything(),
},
},
},
@@ -231,12 +254,18 @@ describe('Design management index page', () => {
},
};
- return wrapper.vm.$nextTick().then(() => {
- findDropzone().vm.$emit('change', [{ name: 'test' }]);
- expect(mutate).toHaveBeenCalledWith(mutationVariables);
- expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
- expect(wrapper.vm.isSaving).toBeTruthy();
- });
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ findDropzone().vm.$emit('change', [{ name: 'test' }]);
+ expect(mutate).toHaveBeenCalledWith(mutationVariables);
+ expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
+ expect(wrapper.vm.isSaving).toBeTruthy();
+ })
+ .then(() => {
+ expect(dropzoneClasses()).toContain('design-list-item');
+ expect(dropzoneClasses()).toContain('design-list-item-new');
+ });
});
it('sets isSaving', () => {
@@ -384,8 +413,7 @@ describe('Design management index page', () => {
it('renders toolbar buttons', () => {
expect(findToolbar().exists()).toBe(true);
- expect(findToolbar().classes()).toContain('d-flex');
- expect(findToolbar().classes()).not.toContain('d-none');
+ expect(findToolbar().isVisible()).toBe(true);
});
it('adds two designs to selected designs when their checkboxes are checked', () => {
@@ -442,9 +470,9 @@ describe('Design management index page', () => {
});
});
- it('on latest version when has no designs does not render toolbar buttons', () => {
+ it('on latest version when has no designs toolbar buttons are invisible', () => {
createComponent({ designs: [], allVersions: [mockVersion] });
- expect(findToolbar().exists()).toBe(false);
+ expect(findToolbar().isVisible()).toBe(false);
});
describe('on non-latest version', () => {
@@ -482,6 +510,10 @@ describe('Design management index page', () => {
});
event = new Event('paste');
+ event.clipboardData = {
+ files: [{ name: 'image.png', type: 'image/png' }],
+ getData: () => 'test.png',
+ };
router.replace({
name: DESIGNS_ROUTE_NAME,
@@ -491,43 +523,52 @@ describe('Design management index page', () => {
});
});
- it('calls onUploadDesign with valid paste', () => {
- event.clipboardData = {
- files: [{ name: 'image.png', type: 'image/png' }],
- getData: () => 'test.png',
- };
-
+ it('does not call paste event if designs wrapper is not hovered', () => {
document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith([
- new File([{ name: 'image.png' }], 'test.png'),
- ]);
+ expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
});
- it('renames a design if it has an image.png filename', () => {
- event.clipboardData = {
- files: [{ name: 'image.png', type: 'image/png' }],
- getData: () => 'image.png',
- };
+ describe('when designs wrapper is hovered', () => {
+ beforeEach(() => {
+ findDesignsWrapper().trigger('mouseenter');
+ });
- document.dispatchEvent(event);
+ it('calls onUploadDesign with valid paste', () => {
+ document.dispatchEvent(event);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith([
- new File([{ name: 'image.png' }], `design_${Date.now()}.png`),
- ]);
- });
+ expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith([
+ new File([{ name: 'image.png' }], 'test.png'),
+ ]);
+ });
- it('does not call onUploadDesign with invalid paste', () => {
- event.clipboardData = {
- items: [{ type: 'text/plain' }, { type: 'text' }],
- files: [],
- };
+ it('renames a design if it has an image.png filename', () => {
+ document.dispatchEvent(event);
- document.dispatchEvent(event);
+ expect(wrapper.vm.onUploadDesign).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.onUploadDesign).toHaveBeenCalledWith([
+ new File([{ name: 'image.png' }], `design_${Date.now()}.png`),
+ ]);
+ });
- expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
+ it('does not call onUploadDesign with invalid paste', () => {
+ event.clipboardData = {
+ items: [{ type: 'text/plain' }, { type: 'text' }],
+ files: [],
+ };
+
+ document.dispatchEvent(event);
+
+ expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
+ });
+
+ it('removes onPaste listener after mouseleave event', async () => {
+ findDesignsWrapper().trigger('mouseleave');
+ document.dispatchEvent(event);
+
+ expect(wrapper.vm.onUploadDesign).not.toHaveBeenCalled();
+ });
});
});
@@ -535,9 +576,18 @@ describe('Design management index page', () => {
it('ensures fullscreen layout is not applied', () => {
createComponent(true);
- wrapper.vm.$router.push('/designs');
+ wrapper.vm.$router.push('/');
expect(mockPageEl.classList.remove).toHaveBeenCalledTimes(1);
expect(mockPageEl.classList.remove).toHaveBeenCalledWith(...DESIGN_DETAIL_LAYOUT_CLASSLIST);
});
+
+ it('should trigger a scrollIntoView method if designs route is detected', () => {
+ router.replace({
+ path: '/designs',
+ });
+ createComponent(true);
+
+ expect(scrollIntoViewMock).toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/design_management/router_spec.js b/spec/frontend/design_management/router_spec.js
index d6488d3837a..2b8c7ee959b 100644
--- a/spec/frontend/design_management/router_spec.js
+++ b/spec/frontend/design_management/router_spec.js
@@ -5,11 +5,7 @@ import App from '~/design_management/components/app.vue';
import Designs from '~/design_management/pages/index.vue';
import DesignDetail from '~/design_management/pages/design/index.vue';
import createRouter from '~/design_management/router';
-import {
- ROOT_ROUTE_NAME,
- DESIGNS_ROUTE_NAME,
- DESIGN_ROUTE_NAME,
-} from '~/design_management/router/constants';
+import { DESIGNS_ROUTE_NAME, DESIGN_ROUTE_NAME } from '~/design_management/router/constants';
import '~/commons/bootstrap';
function factory(routeArg) {
@@ -49,7 +45,7 @@ describe('Design management router', () => {
window.location.hash = '';
});
- describe.each([['/'], [{ name: ROOT_ROUTE_NAME }]])('root route', routeArg => {
+ describe.each([['/'], [{ name: DESIGNS_ROUTE_NAME }]])('root route', routeArg => {
it('pushes home component', () => {
const wrapper = factory(routeArg);
@@ -57,14 +53,6 @@ describe('Design management router', () => {
});
});
- describe.each([['/designs'], [{ name: DESIGNS_ROUTE_NAME }]])('designs route', routeArg => {
- it('pushes designs root component', () => {
- const wrapper = factory(routeArg);
-
- expect(wrapper.find(Designs).exists()).toBe(true);
- });
- });
-
describe.each([['/designs/1'], [{ name: DESIGN_ROUTE_NAME, params: { id: '1' } }]])(
'designs detail route',
routeArg => {
diff --git a/spec/frontend/design_management/utils/cache_update_spec.js b/spec/frontend/design_management/utils/cache_update_spec.js
index 641d35ff9ff..e8a5cf3949d 100644
--- a/spec/frontend/design_management/utils/cache_update_spec.js
+++ b/spec/frontend/design_management/utils/cache_update_spec.js
@@ -13,7 +13,7 @@ import {
UPDATE_IMAGE_DIFF_NOTE_ERROR,
} from '~/design_management/utils/error_messages';
import design from '../mock_data/design';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash.js');
diff --git a/spec/frontend/design_management/utils/design_management_utils_spec.js b/spec/frontend/design_management/utils/design_management_utils_spec.js
index 478ebadc8f6..e6d836b9157 100644
--- a/spec/frontend/design_management/utils/design_management_utils_spec.js
+++ b/spec/frontend/design_management/utils/design_management_utils_spec.js
@@ -51,7 +51,7 @@ describe('extractDiscussions', () => {
};
});
- it('discards the edges.node artifacts of GraphQL', () => {
+ it('discards the node artifacts of GraphQL', () => {
expect(extractDiscussions(discussions)).toEqual([
{ id: 1, notes: ['a'], index: 1 },
{ id: 2, notes: ['b'], index: 2 },
@@ -96,10 +96,7 @@ describe('optimistic responses', () => {
discussions: { __typename: 'DesignDiscussion', nodes: [] },
versions: {
__typename: 'DesignVersionConnection',
- edges: {
- __typename: 'DesignVersionEdge',
- node: { __typename: 'DesignVersion', id: -1, sha: -1 },
- },
+ nodes: { __typename: 'DesignVersion', id: -1, sha: -1 },
},
},
],
diff --git a/spec/frontend/design_management/utils/error_messages_spec.js b/spec/frontend/design_management/utils/error_messages_spec.js
index 635ff931d7d..f5072c3b6b7 100644
--- a/spec/frontend/design_management/utils/error_messages_spec.js
+++ b/spec/frontend/design_management/utils/error_messages_spec.js
@@ -10,8 +10,8 @@ const mockFilenames = n =>
describe('Error message', () => {
describe('designDeletionError', () => {
- const singularMsg = 'Could not delete a design. Please try again.';
- const pluralMsg = 'Could not delete designs. Please try again.';
+ const singularMsg = 'Could not archive a design. Please try again.';
+ const pluralMsg = 'Could not archive designs. Please try again.';
describe('when [singular=true]', () => {
it.each([[undefined], [true]])('uses singular grammar', singularOption => {
@@ -55,7 +55,7 @@ describe('Error message', () => {
'Upload skipped. Some of the designs you tried uploading did not change: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg, and 2 more.',
],
])('designUploadSkippedWarning', (uploadedFiles, skippedFiles, expected) => {
- test('returns expected warning message', () => {
+ it('returns expected warning message', () => {
expect(designUploadSkippedWarning(uploadedFiles, skippedFiles)).toBe(expected);
});
});
diff --git a/spec/frontend/design_management_new/components/__snapshots__/design_note_pin_spec.js.snap b/spec/frontend/design_management_legacy/components/__snapshots__/design_note_pin_spec.js.snap
index 4c848256e5b..62a0f675cff 100644
--- a/spec/frontend/design_management_new/components/__snapshots__/design_note_pin_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/__snapshots__/design_note_pin_spec.js.snap
@@ -3,13 +3,13 @@
exports[`Design note pin component should match the snapshot of note when repositioning 1`] = `
<button
aria-label="Comment form position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center btn-transparent comment-indicator"
+ class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 btn-transparent comment-indicator"
style="left: 10px; top: 10px; cursor: move;"
type="button"
>
- <icon-stub
+ <gl-icon-stub
name="image-comment-dark"
- size="16"
+ size="24"
/>
</button>
`;
@@ -17,7 +17,7 @@ exports[`Design note pin component should match the snapshot of note when reposi
exports[`Design note pin component should match the snapshot of note with index 1`] = `
<button
aria-label="Comment '1' position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center js-image-badge badge badge-pill"
+ class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 js-image-badge badge badge-pill"
style="left: 10px; top: 10px;"
type="button"
>
@@ -30,13 +30,13 @@ exports[`Design note pin component should match the snapshot of note with index
exports[`Design note pin component should match the snapshot of note without index 1`] = `
<button
aria-label="Comment form position"
- class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center btn-transparent comment-indicator"
+ class="design-pin gl-absolute gl-display-flex gl-align-items-center gl-justify-content-center gl-p-0 btn-transparent comment-indicator"
style="left: 10px; top: 10px;"
type="button"
>
- <icon-stub
+ <gl-icon-stub
name="image-comment-dark"
- size="16"
+ size="24"
/>
</button>
`;
diff --git a/spec/frontend/design_management_new/components/__snapshots__/design_presentation_spec.js.snap b/spec/frontend/design_management_legacy/components/__snapshots__/design_presentation_spec.js.snap
index 189962c5b2e..189962c5b2e 100644
--- a/spec/frontend/design_management_new/components/__snapshots__/design_presentation_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/__snapshots__/design_presentation_spec.js.snap
diff --git a/spec/frontend/design_management_new/components/__snapshots__/design_scaler_spec.js.snap b/spec/frontend/design_management_legacy/components/__snapshots__/design_scaler_spec.js.snap
index cb4575cbd11..cb4575cbd11 100644
--- a/spec/frontend/design_management_new/components/__snapshots__/design_scaler_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/__snapshots__/design_scaler_spec.js.snap
diff --git a/spec/frontend/design_management_new/components/__snapshots__/image_spec.js.snap b/spec/frontend/design_management_legacy/components/__snapshots__/image_spec.js.snap
index acaa62b11eb..acaa62b11eb 100644
--- a/spec/frontend/design_management_new/components/__snapshots__/image_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/__snapshots__/image_spec.js.snap
diff --git a/spec/frontend/design_management_new/components/delete_button_spec.js b/spec/frontend/design_management_legacy/components/delete_button_spec.js
index 218c58847a6..73b4908d06a 100644
--- a/spec/frontend/design_management_new/components/delete_button_spec.js
+++ b/spec/frontend/design_management_legacy/components/delete_button_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
-import { GlButton, GlModal, GlModalDirective } from '@gitlab/ui';
-import BatchDeleteButton from '~/design_management_new/components/delete_button.vue';
+import { GlDeprecatedButton, GlModal, GlModalDirective } from '@gitlab/ui';
+import BatchDeleteButton from '~/design_management_legacy/components/delete_button.vue';
describe('Batch delete button component', () => {
let wrapper;
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.find(GlDeprecatedButton);
const findModal = () => wrapper.find(GlModal);
function createComponent(isDeleting = false) {
diff --git a/spec/frontend/design_management_new/components/design_note_pin_spec.js b/spec/frontend/design_management_legacy/components/design_note_pin_spec.js
index 8e2caa604f4..3077928cf86 100644
--- a/spec/frontend/design_management_new/components/design_note_pin_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_note_pin_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import DesignNotePin from '~/design_management_new/components/design_note_pin.vue';
+import DesignNotePin from '~/design_management_legacy/components/design_note_pin.vue';
describe('Design note pin component', () => {
let wrapper;
diff --git a/spec/frontend/design_management_new/components/design_notes/__snapshots__/design_note_spec.js.snap b/spec/frontend/design_management_legacy/components/design_notes/__snapshots__/design_note_spec.js.snap
index b55bacb6fc5..b55bacb6fc5 100644
--- a/spec/frontend/design_management_new/components/design_notes/__snapshots__/design_note_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/design_notes/__snapshots__/design_note_spec.js.snap
diff --git a/spec/frontend/design_management_new/components/design_notes/__snapshots__/design_reply_form_spec.js.snap b/spec/frontend/design_management_legacy/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
index e01c79e3520..e01c79e3520 100644
--- a/spec/frontend/design_management_new/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/design_notes/__snapshots__/design_reply_form_spec.js.snap
diff --git a/spec/frontend/design_management_new/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management_legacy/components/design_notes/design_discussion_spec.js
index 401ce64e859..d20be97f470 100644
--- a/spec/frontend/design_management_new/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_notes/design_discussion_spec.js
@@ -1,13 +1,13 @@
import { mount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
import notes from '../../mock_data/notes';
-import DesignDiscussion from '~/design_management_new/components/design_notes/design_discussion.vue';
-import DesignNote from '~/design_management_new/components/design_notes/design_note.vue';
-import DesignReplyForm from '~/design_management_new/components/design_notes/design_reply_form.vue';
-import createNoteMutation from '~/design_management_new/graphql/mutations/create_note.mutation.graphql';
-import toggleResolveDiscussionMutation from '~/design_management_new/graphql/mutations/toggle_resolve_discussion.mutation.graphql';
+import DesignDiscussion from '~/design_management_legacy/components/design_notes/design_discussion.vue';
+import DesignNote from '~/design_management_legacy/components/design_notes/design_note.vue';
+import DesignReplyForm from '~/design_management_legacy/components/design_notes/design_reply_form.vue';
+import createNoteMutation from '~/design_management_legacy/graphql/mutations/create_note.mutation.graphql';
+import toggleResolveDiscussionMutation from '~/design_management_legacy/graphql/mutations/toggle_resolve_discussion.mutation.graphql';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
-import ToggleRepliesWidget from '~/design_management_new/components/design_notes/toggle_replies_widget.vue';
+import ToggleRepliesWidget from '~/design_management_legacy/components/design_notes/toggle_replies_widget.vue';
const discussion = {
id: '0',
@@ -61,10 +61,6 @@ describe('Design discussions component', () => {
...data,
};
},
- provide: {
- projectPath: 'project-path',
- issueIid: '1',
- },
mocks: {
$apollo,
$route: {
diff --git a/spec/frontend/design_management_new/components/design_notes/design_note_spec.js b/spec/frontend/design_management_legacy/components/design_notes/design_note_spec.js
index b0e3e85b9c6..aa187cd1388 100644
--- a/spec/frontend/design_management_new/components/design_notes/design_note_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_notes/design_note_spec.js
@@ -1,9 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import { ApolloMutation } from 'vue-apollo';
-import DesignNote from '~/design_management_new/components/design_notes/design_note.vue';
+import DesignNote from '~/design_management_legacy/components/design_notes/design_note.vue';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import DesignReplyForm from '~/design_management_new/components/design_notes/design_reply_form.vue';
+import DesignReplyForm from '~/design_management_legacy/components/design_notes/design_reply_form.vue';
const scrollIntoViewMock = jest.fn();
const note = {
diff --git a/spec/frontend/design_management_new/components/design_notes/design_reply_form_spec.js b/spec/frontend/design_management_legacy/components/design_notes/design_reply_form_spec.js
index 9c1d6154516..088a71b64af 100644
--- a/spec/frontend/design_management_new/components/design_notes/design_reply_form_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_notes/design_reply_form_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import DesignReplyForm from '~/design_management_new/components/design_notes/design_reply_form.vue';
+import DesignReplyForm from '~/design_management_legacy/components/design_notes/design_reply_form.vue';
const showModal = jest.fn();
diff --git a/spec/frontend/design_management_new/components/design_notes/toggle_replies_widget_spec.js b/spec/frontend/design_management_legacy/components/design_notes/toggle_replies_widget_spec.js
index d3c89075a24..acc7cbbca52 100644
--- a/spec/frontend/design_management_new/components/design_notes/toggle_replies_widget_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_notes/toggle_replies_widget_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlIcon, GlButton, GlLink } from '@gitlab/ui';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
-import ToggleRepliesWidget from '~/design_management_new/components/design_notes/toggle_replies_widget.vue';
+import ToggleRepliesWidget from '~/design_management_legacy/components/design_notes/toggle_replies_widget.vue';
import notes from '../../mock_data/notes';
describe('Toggle replies widget component', () => {
diff --git a/spec/frontend/design_management_new/components/design_overlay_spec.js b/spec/frontend/design_management_legacy/components/design_overlay_spec.js
index 4ca69c143a8..c014f3479f4 100644
--- a/spec/frontend/design_management_new/components/design_overlay_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_overlay_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
-import DesignOverlay from '~/design_management_new/components/design_overlay.vue';
-import updateActiveDiscussion from '~/design_management_new/graphql/mutations/update_active_discussion.mutation.graphql';
+import DesignOverlay from '~/design_management_legacy/components/design_overlay.vue';
+import updateActiveDiscussion from '~/design_management_legacy/graphql/mutations/update_active_discussion.mutation.graphql';
import notes from '../mock_data/notes';
-import { ACTIVE_DISCUSSION_SOURCE_TYPES } from '~/design_management_new/constants';
+import { ACTIVE_DISCUSSION_SOURCE_TYPES } from '~/design_management_legacy/constants';
const mutate = jest.fn(() => Promise.resolve());
diff --git a/spec/frontend/design_management_new/components/design_presentation_spec.js b/spec/frontend/design_management_legacy/components/design_presentation_spec.js
index d043a762cd2..ceff86b0549 100644
--- a/spec/frontend/design_management_new/components/design_presentation_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_presentation_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import DesignPresentation from '~/design_management_new/components/design_presentation.vue';
-import DesignOverlay from '~/design_management_new/components/design_overlay.vue';
+import DesignPresentation from '~/design_management_legacy/components/design_presentation.vue';
+import DesignOverlay from '~/design_management_legacy/components/design_overlay.vue';
const mockOverlayData = {
overlayDimensions: {
diff --git a/spec/frontend/design_management_new/components/design_scaler_spec.js b/spec/frontend/design_management_legacy/components/design_scaler_spec.js
index 5ff2554cd60..30ef5ab159b 100644
--- a/spec/frontend/design_management_new/components/design_scaler_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_scaler_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import DesignScaler from '~/design_management_new/components/design_scaler.vue';
+import DesignScaler from '~/design_management_legacy/components/design_scaler.vue';
describe('Design management design scaler component', () => {
let wrapper;
diff --git a/spec/frontend/design_management_new/components/design_sidebar_spec.js b/spec/frontend/design_management_legacy/components/design_sidebar_spec.js
index f1d442a7b21..fc0f618c359 100644
--- a/spec/frontend/design_management_new/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management_legacy/components/design_sidebar_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import { GlCollapse, GlPopover } from '@gitlab/ui';
import Cookies from 'js-cookie';
-import DesignSidebar from '~/design_management_new/components/design_sidebar.vue';
+import DesignSidebar from '~/design_management_legacy/components/design_sidebar.vue';
import Participants from '~/sidebar/components/participants/participants.vue';
-import DesignDiscussion from '~/design_management_new/components/design_notes/design_discussion.vue';
+import DesignDiscussion from '~/design_management_legacy/components/design_notes/design_discussion.vue';
import design from '../mock_data/design';
-import updateActiveDiscussionMutation from '~/design_management_new/graphql/mutations/update_active_discussion.mutation.graphql';
+import updateActiveDiscussionMutation from '~/design_management_legacy/graphql/mutations/update_active_discussion.mutation.graphql';
const updateActiveDiscussionMutationVariables = {
mutation: updateActiveDiscussionMutation,
diff --git a/spec/frontend/design_management_new/components/image_spec.js b/spec/frontend/design_management_legacy/components/image_spec.js
index c1a8a8767df..265c91abb4e 100644
--- a/spec/frontend/design_management_new/components/image_spec.js
+++ b/spec/frontend/design_management_legacy/components/image_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlIcon } from '@gitlab/ui';
-import DesignImage from '~/design_management_new/components/image.vue';
+import DesignImage from '~/design_management_legacy/components/image.vue';
describe('Design management large image component', () => {
let wrapper;
diff --git a/spec/frontend/design_management_legacy/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management_legacy/components/list/__snapshots__/item_spec.js.snap
new file mode 100644
index 00000000000..168b9424006
--- /dev/null
+++ b/spec/frontend/design_management_legacy/components/list/__snapshots__/item_spec.js.snap
@@ -0,0 +1,149 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Design management list item component when item appears in view after image is loaded renders media broken icon when image onerror triggered 1`] = `
+<gl-icon-stub
+ class="text-secondary"
+ name="media-broken"
+ size="32"
+/>
+`;
+
+exports[`Design management list item component with notes renders item with multiple comments 1`] = `
+<router-link-stub
+ class="card cursor-pointer text-plain js-design-list-item design-list-item"
+ to="[object Object]"
+>
+ <div
+ class="card-body p-0 d-flex-center overflow-hidden position-relative"
+ >
+ <!---->
+
+ <gl-intersection-observer-stub>
+ <!---->
+
+ <img
+ alt="test"
+ class="block mx-auto mw-100 mh-100 design-img"
+ data-qa-selector="design_image"
+ src=""
+ />
+ </gl-intersection-observer-stub>
+ </div>
+
+ <div
+ class="card-footer d-flex w-100"
+ >
+ <div
+ class="d-flex flex-column str-truncated-100"
+ >
+ <span
+ class="bold str-truncated-100"
+ data-qa-selector="design_file_name"
+ >
+ test
+ </span>
+
+ <span
+ class="str-truncated-100"
+ >
+
+ Updated
+ <timeago-stub
+ cssclass=""
+ time="01-01-2019"
+ tooltipplacement="bottom"
+ />
+ </span>
+ </div>
+
+ <div
+ class="ml-auto d-flex align-items-center text-secondary"
+ >
+ <icon-stub
+ class="ml-1"
+ name="comments"
+ size="16"
+ />
+
+ <span
+ aria-label="2 comments"
+ class="ml-1"
+ >
+
+ 2
+
+ </span>
+ </div>
+ </div>
+</router-link-stub>
+`;
+
+exports[`Design management list item component with notes renders item with single comment 1`] = `
+<router-link-stub
+ class="card cursor-pointer text-plain js-design-list-item design-list-item"
+ to="[object Object]"
+>
+ <div
+ class="card-body p-0 d-flex-center overflow-hidden position-relative"
+ >
+ <!---->
+
+ <gl-intersection-observer-stub>
+ <!---->
+
+ <img
+ alt="test"
+ class="block mx-auto mw-100 mh-100 design-img"
+ data-qa-selector="design_image"
+ src=""
+ />
+ </gl-intersection-observer-stub>
+ </div>
+
+ <div
+ class="card-footer d-flex w-100"
+ >
+ <div
+ class="d-flex flex-column str-truncated-100"
+ >
+ <span
+ class="bold str-truncated-100"
+ data-qa-selector="design_file_name"
+ >
+ test
+ </span>
+
+ <span
+ class="str-truncated-100"
+ >
+
+ Updated
+ <timeago-stub
+ cssclass=""
+ time="01-01-2019"
+ tooltipplacement="bottom"
+ />
+ </span>
+ </div>
+
+ <div
+ class="ml-auto d-flex align-items-center text-secondary"
+ >
+ <icon-stub
+ class="ml-1"
+ name="comments"
+ size="16"
+ />
+
+ <span
+ aria-label="1 comment"
+ class="ml-1"
+ >
+
+ 1
+
+ </span>
+ </div>
+ </div>
+</router-link-stub>
+`;
diff --git a/spec/frontend/design_management_new/components/list/item_spec.js b/spec/frontend/design_management_legacy/components/list/item_spec.js
index 5e3e6832acb..e9bb0fc3f29 100644
--- a/spec/frontend/design_management_new/components/list/item_spec.js
+++ b/spec/frontend/design_management_legacy/components/list/item_spec.js
@@ -1,7 +1,8 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlIcon, GlLoadingIcon, GlIntersectionObserver } from '@gitlab/ui';
import VueRouter from 'vue-router';
-import Item from '~/design_management_new/components/list/item.vue';
+import Icon from '~/vue_shared/components/icon.vue';
+import Item from '~/design_management_legacy/components/list/item.vue';
const localVue = createLocalVue();
localVue.use(VueRouter);
@@ -18,6 +19,10 @@ const DESIGN_VERSION_EVENT = {
describe('Design management list item component', () => {
let wrapper;
+ const findDesignEvent = () => wrapper.find('[data-testid="designEvent"]');
+ const findEventIcon = () => findDesignEvent().find(Icon);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
function createComponent({
notesCount = 0,
event = DESIGN_VERSION_EVENT.NO_CHANGE,
@@ -134,35 +139,31 @@ describe('Design management list item component', () => {
});
});
- describe('with no notes', () => {
- it('renders item with no status icon for none event', () => {
- createComponent();
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders item with correct status icon for modification event', () => {
- createComponent({ event: DESIGN_VERSION_EVENT.MODIFICATION });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders item with correct status icon for deletion event', () => {
- createComponent({ event: DESIGN_VERSION_EVENT.DELETION });
+ it('renders loading spinner when isUploading is true', () => {
+ createComponent({ isUploading: true });
- expect(wrapper.element).toMatchSnapshot();
- });
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
- it('renders item with correct status icon for creation event', () => {
- createComponent({ event: DESIGN_VERSION_EVENT.CREATION });
+ it('renders item with no status icon for none event', () => {
+ createComponent();
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('renders loading spinner when isUploading is true', () => {
- createComponent({ isUploading: true });
+ expect(findDesignEvent().exists()).toBe(false);
+ });
- expect(wrapper.element).toMatchSnapshot();
+ describe('with associated event', () => {
+ it.each`
+ event | icon | className
+ ${DESIGN_VERSION_EVENT.MODIFICATION} | ${'file-modified-solid'} | ${'text-primary-500'}
+ ${DESIGN_VERSION_EVENT.DELETION} | ${'file-deletion-solid'} | ${'text-danger-500'}
+ ${DESIGN_VERSION_EVENT.CREATION} | ${'file-addition-solid'} | ${'text-success-500'}
+ `('renders item with correct status icon for $event event', ({ event, icon, className }) => {
+ createComponent({ event });
+ const eventIcon = findEventIcon();
+
+ expect(eventIcon.exists()).toBe(true);
+ expect(eventIcon.props('name')).toBe(icon);
+ expect(eventIcon.classes()).toContain(className);
});
});
});
diff --git a/spec/frontend/design_management_new/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management_legacy/components/toolbar/__snapshots__/index_spec.js.snap
index f251171ecda..e55cff8de3d 100644
--- a/spec/frontend/design_management_new/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/toolbar/__snapshots__/index_spec.js.snap
@@ -7,7 +7,6 @@ exports[`Design management toolbar component renders design and updated data 1`]
<a
aria-label="Go back to designs"
class="mr-3 text-plain d-flex justify-content-center align-items-center"
- data-testid="close-design"
>
<icon-stub
name="close"
@@ -50,7 +49,6 @@ exports[`Design management toolbar component renders design and updated data 1`]
<delete-button-stub
buttonclass=""
- buttonsize="medium"
buttonvariant="danger"
hasselecteddesigns="true"
>
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/pagination_button_spec.js.snap b/spec/frontend/design_management_legacy/components/toolbar/__snapshots__/pagination_button_spec.js.snap
index 08662a04f15..08662a04f15 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/pagination_button_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/toolbar/__snapshots__/pagination_button_spec.js.snap
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/pagination_spec.js.snap b/spec/frontend/design_management_legacy/components/toolbar/__snapshots__/pagination_spec.js.snap
index 0197b4bff79..0197b4bff79 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/pagination_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/toolbar/__snapshots__/pagination_spec.js.snap
diff --git a/spec/frontend/design_management_new/components/toolbar/index_spec.js b/spec/frontend/design_management_legacy/components/toolbar/index_spec.js
index eb5ae15ed58..8207cad4136 100644
--- a/spec/frontend/design_management_new/components/toolbar/index_spec.js
+++ b/spec/frontend/design_management_legacy/components/toolbar/index_spec.js
@@ -1,9 +1,9 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueRouter from 'vue-router';
-import Toolbar from '~/design_management_new/components/toolbar/index.vue';
-import DeleteButton from '~/design_management_new/components/delete_button.vue';
-import { DESIGNS_ROUTE_NAME } from '~/design_management_new/router/constants';
import { GlDeprecatedButton } from '@gitlab/ui';
+import Toolbar from '~/design_management_legacy/components/toolbar/index.vue';
+import DeleteButton from '~/design_management_legacy/components/delete_button.vue';
+import { DESIGNS_ROUTE_NAME } from '~/design_management_legacy/router/constants';
const localVue = createLocalVue();
localVue.use(VueRouter);
diff --git a/spec/frontend/design_management_new/components/toolbar/pagination_button_spec.js b/spec/frontend/design_management_legacy/components/toolbar/pagination_button_spec.js
index 5f33d65fc1f..d2153adca45 100644
--- a/spec/frontend/design_management_new/components/toolbar/pagination_button_spec.js
+++ b/spec/frontend/design_management_legacy/components/toolbar/pagination_button_spec.js
@@ -1,7 +1,7 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import VueRouter from 'vue-router';
-import PaginationButton from '~/design_management_new/components/toolbar/pagination_button.vue';
-import { DESIGN_ROUTE_NAME } from '~/design_management_new/router/constants';
+import PaginationButton from '~/design_management_legacy/components/toolbar/pagination_button.vue';
+import { DESIGN_ROUTE_NAME } from '~/design_management_legacy/router/constants';
const localVue = createLocalVue();
localVue.use(VueRouter);
diff --git a/spec/frontend/design_management/components/toolbar/pagination_spec.js b/spec/frontend/design_management_legacy/components/toolbar/pagination_spec.js
index db5a36dadf6..21b55113a6e 100644
--- a/spec/frontend/design_management/components/toolbar/pagination_spec.js
+++ b/spec/frontend/design_management_legacy/components/toolbar/pagination_spec.js
@@ -1,8 +1,8 @@
/* global Mousetrap */
import 'mousetrap';
import { shallowMount } from '@vue/test-utils';
-import Pagination from '~/design_management/components/toolbar/pagination.vue';
-import { DESIGN_ROUTE_NAME } from '~/design_management/router/constants';
+import Pagination from '~/design_management_legacy/components/toolbar/pagination.vue';
+import { DESIGN_ROUTE_NAME } from '~/design_management_legacy/router/constants';
const push = jest.fn();
const $router = {
diff --git a/spec/frontend/design_management_new/components/upload/__snapshots__/button_spec.js.snap b/spec/frontend/design_management_legacy/components/upload/__snapshots__/button_spec.js.snap
index b498becc606..27c0ba589e6 100644
--- a/spec/frontend/design_management_new/components/upload/__snapshots__/button_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/upload/__snapshots__/button_spec.js.snap
@@ -4,10 +4,8 @@ exports[`Design management upload button component renders inverted upload desig
<div
isinverted="true"
>
- <gl-button-stub
- category="tertiary"
- icon=""
- size="small"
+ <gl-deprecated-button-stub
+ size="md"
title="Adding a design with the same filename replaces the file in a new version."
variant="success"
>
@@ -15,7 +13,7 @@ exports[`Design management upload button component renders inverted upload desig
Upload designs
<!---->
- </gl-button-stub>
+ </gl-deprecated-button-stub>
<input
accept="image/*"
@@ -29,11 +27,9 @@ exports[`Design management upload button component renders inverted upload desig
exports[`Design management upload button component renders loading icon 1`] = `
<div>
- <gl-button-stub
- category="tertiary"
+ <gl-deprecated-button-stub
disabled="true"
- icon=""
- size="small"
+ size="md"
title="Adding a design with the same filename replaces the file in a new version."
variant="success"
>
@@ -47,7 +43,7 @@ exports[`Design management upload button component renders loading icon 1`] = `
label="Loading"
size="sm"
/>
- </gl-button-stub>
+ </gl-deprecated-button-stub>
<input
accept="image/*"
@@ -61,10 +57,8 @@ exports[`Design management upload button component renders loading icon 1`] = `
exports[`Design management upload button component renders upload design button 1`] = `
<div>
- <gl-button-stub
- category="tertiary"
- icon=""
- size="small"
+ <gl-deprecated-button-stub
+ size="md"
title="Adding a design with the same filename replaces the file in a new version."
variant="success"
>
@@ -72,7 +66,7 @@ exports[`Design management upload button component renders upload design button
Upload designs
<!---->
- </gl-button-stub>
+ </gl-deprecated-button-stub>
<input
accept="image/*"
diff --git a/spec/frontend/design_management_new/components/upload/__snapshots__/design_dropzone_spec.js.snap b/spec/frontend/design_management_legacy/components/upload/__snapshots__/design_dropzone_spec.js.snap
index c53c6c889b0..0737b9729a2 100644
--- a/spec/frontend/design_management_new/components/upload/__snapshots__/design_dropzone_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/upload/__snapshots__/design_dropzone_spec.js.snap
@@ -5,23 +5,20 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
- data-testid="dropzone-area"
+ class="d-flex-center flex-column text-center"
>
<gl-icon-stub
- class="gl-mb-2"
- name="upload"
- size="24"
+ class="mb-4"
+ name="doc-new"
+ size="48"
/>
- <p
- class="gl-font-weight-bold gl-mb-0"
- >
+ <p>
<gl-sprintf-stub
- message="Drop or %{linkStart}upload%{linkEnd} Designs to attach"
+ message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
/>
</p>
</div>
@@ -46,9 +43,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -61,9 +56,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style=""
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
@@ -81,23 +74,20 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
- data-testid="dropzone-area"
+ class="d-flex-center flex-column text-center"
>
<gl-icon-stub
- class="gl-mb-2"
- name="upload"
- size="24"
+ class="mb-4"
+ name="doc-new"
+ size="48"
/>
- <p
- class="gl-font-weight-bold gl-mb-0"
- >
+ <p>
<gl-sprintf-stub
- message="Drop or %{linkStart}upload%{linkEnd} Designs to attach"
+ message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
/>
</p>
</div>
@@ -122,9 +112,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -137,9 +125,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style=""
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
@@ -157,23 +143,20 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
- data-testid="dropzone-area"
+ class="d-flex-center flex-column text-center"
>
<gl-icon-stub
- class="gl-mb-2"
- name="upload"
- size="24"
+ class="mb-4"
+ name="doc-new"
+ size="48"
/>
- <p
- class="gl-font-weight-bold gl-mb-0"
- >
+ <p>
<gl-sprintf-stub
- message="Drop or %{linkStart}upload%{linkEnd} Designs to attach"
+ message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
/>
</p>
</div>
@@ -197,9 +180,7 @@ exports[`Design management dropzone component when dragging renders correct temp
<div
class="mw-50 text-center"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -212,9 +193,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
@@ -232,23 +211,20 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
- data-testid="dropzone-area"
+ class="d-flex-center flex-column text-center"
>
<gl-icon-stub
- class="gl-mb-2"
- name="upload"
- size="24"
+ class="mb-4"
+ name="doc-new"
+ size="48"
/>
- <p
- class="gl-font-weight-bold gl-mb-0"
- >
+ <p>
<gl-sprintf-stub
- message="Drop or %{linkStart}upload%{linkEnd} Designs to attach"
+ message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
/>
</p>
</div>
@@ -272,9 +248,7 @@ exports[`Design management dropzone component when dragging renders correct temp
<div
class="mw-50 text-center"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -287,9 +261,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
@@ -307,23 +279,20 @@ exports[`Design management dropzone component when dragging renders correct temp
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
- data-testid="dropzone-area"
+ class="d-flex-center flex-column text-center"
>
<gl-icon-stub
- class="gl-mb-2"
- name="upload"
- size="24"
+ class="mb-4"
+ name="doc-new"
+ size="48"
/>
- <p
- class="gl-font-weight-bold gl-mb-0"
- >
+ <p>
<gl-sprintf-stub
- message="Drop or %{linkStart}upload%{linkEnd} Designs to attach"
+ message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
/>
</p>
</div>
@@ -347,9 +316,7 @@ exports[`Design management dropzone component when dragging renders correct temp
<div
class="mw-50 text-center"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -362,9 +329,7 @@ exports[`Design management dropzone component when dragging renders correct temp
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
@@ -382,23 +347,20 @@ exports[`Design management dropzone component when no slot provided renders defa
class="w-100 position-relative"
>
<button
- class="card design-dropzone-card design-dropzone-border w-100 h-100 gl-align-items-center gl-justify-content-center gl-p-3"
+ class="card design-dropzone-card design-dropzone-border w-100 h-100 d-flex-center p-3"
>
<div
- class="gl-display-flex gl-align-items-center gl-justify-content-center gl-text-center gl-flex-direction-column"
- data-testid="dropzone-area"
+ class="d-flex-center flex-column text-center"
>
<gl-icon-stub
- class="gl-mb-2"
- name="upload"
- size="24"
+ class="mb-4"
+ name="doc-new"
+ size="48"
/>
- <p
- class="gl-font-weight-bold gl-mb-0"
- >
+ <p>
<gl-sprintf-stub
- message="Drop or %{linkStart}upload%{linkEnd} Designs to attach"
+ message="%{lineOneStart}Drag and drop to upload your designs%{lineOneEnd} or %{linkStart}click to upload%{linkEnd}."
/>
</p>
</div>
@@ -422,9 +384,7 @@ exports[`Design management dropzone component when no slot provided renders defa
<div
class="mw-50 text-center"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -437,9 +397,7 @@ exports[`Design management dropzone component when no slot provided renders defa
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
@@ -470,9 +428,7 @@ exports[`Design management dropzone component when slot provided renders dropzon
<div
class="mw-50 text-center"
>
- <h3
- class=""
- >
+ <h3>
Oh no!
</h3>
@@ -485,9 +441,7 @@ exports[`Design management dropzone component when slot provided renders dropzon
class="mw-50 text-center"
style="display: none;"
>
- <h3
- class=""
- >
+ <h3>
Incoming!
</h3>
diff --git a/spec/frontend/design_management_new/components/upload/__snapshots__/design_version_dropdown_spec.js.snap b/spec/frontend/design_management_legacy/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
index 0d16acdef54..d34b925f33d 100644
--- a/spec/frontend/design_management_new/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
+++ b/spec/frontend/design_management_legacy/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
@@ -1,23 +1,14 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design management design version dropdown component renders design version dropdown button 1`] = `
-<gl-new-dropdown-stub
- category="tertiary"
+<gl-deprecated-dropdown-stub
class="design-version-dropdown"
- headertext=""
issueiid=""
projectpath=""
- size="small"
text="Showing Latest Version"
- variant="default"
+ variant="link"
>
- <gl-new-dropdown-item-stub
- avatarurl=""
- iconcolor=""
- iconname=""
- iconrightname=""
- secondarytext=""
- >
+ <gl-deprecated-dropdown-item-stub>
<router-link-stub
class="d-flex js-version-link"
to="[object Object]"
@@ -37,17 +28,11 @@ exports[`Design management design version dropdown component renders design vers
</div>
<i
- class="fa fa-check pull-right"
+ class="fa fa-check float-right gl-mr-2"
/>
</router-link-stub>
- </gl-new-dropdown-item-stub>
- <gl-new-dropdown-item-stub
- avatarurl=""
- iconcolor=""
- iconname=""
- iconrightname=""
- secondarytext=""
- >
+ </gl-deprecated-dropdown-item-stub>
+ <gl-deprecated-dropdown-item-stub>
<router-link-stub
class="d-flex js-version-link"
to="[object Object]"
@@ -66,28 +51,19 @@ exports[`Design management design version dropdown component renders design vers
<!---->
</router-link-stub>
- </gl-new-dropdown-item-stub>
-</gl-new-dropdown-stub>
+ </gl-deprecated-dropdown-item-stub>
+</gl-deprecated-dropdown-stub>
`;
exports[`Design management design version dropdown component renders design version list 1`] = `
-<gl-new-dropdown-stub
- category="tertiary"
+<gl-deprecated-dropdown-stub
class="design-version-dropdown"
- headertext=""
issueiid=""
projectpath=""
- size="small"
text="Showing Latest Version"
- variant="default"
+ variant="link"
>
- <gl-new-dropdown-item-stub
- avatarurl=""
- iconcolor=""
- iconname=""
- iconrightname=""
- secondarytext=""
- >
+ <gl-deprecated-dropdown-item-stub>
<router-link-stub
class="d-flex js-version-link"
to="[object Object]"
@@ -107,17 +83,11 @@ exports[`Design management design version dropdown component renders design vers
</div>
<i
- class="fa fa-check pull-right"
+ class="fa fa-check float-right gl-mr-2"
/>
</router-link-stub>
- </gl-new-dropdown-item-stub>
- <gl-new-dropdown-item-stub
- avatarurl=""
- iconcolor=""
- iconname=""
- iconrightname=""
- secondarytext=""
- >
+ </gl-deprecated-dropdown-item-stub>
+ <gl-deprecated-dropdown-item-stub>
<router-link-stub
class="d-flex js-version-link"
to="[object Object]"
@@ -136,6 +106,6 @@ exports[`Design management design version dropdown component renders design vers
<!---->
</router-link-stub>
- </gl-new-dropdown-item-stub>
-</gl-new-dropdown-stub>
+ </gl-deprecated-dropdown-item-stub>
+</gl-deprecated-dropdown-stub>
`;
diff --git a/spec/frontend/design_management_new/components/upload/button_spec.js b/spec/frontend/design_management_legacy/components/upload/button_spec.js
index 7f751982491..dde5c694194 100644
--- a/spec/frontend/design_management_new/components/upload/button_spec.js
+++ b/spec/frontend/design_management_legacy/components/upload/button_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import UploadButton from '~/design_management_new/components/upload/button.vue';
+import UploadButton from '~/design_management_legacy/components/upload/button.vue';
describe('Design management upload button component', () => {
let wrapper;
diff --git a/spec/frontend/design_management_new/components/upload/design_dropzone_spec.js b/spec/frontend/design_management_legacy/components/upload/design_dropzone_spec.js
index c48cbb10172..1907a3124a6 100644
--- a/spec/frontend/design_management_new/components/upload/design_dropzone_spec.js
+++ b/spec/frontend/design_management_legacy/components/upload/design_dropzone_spec.js
@@ -1,7 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import DesignDropzone from '~/design_management_new/components/upload/design_dropzone.vue';
-import createFlash from '~/flash';
-import { GlIcon } from '@gitlab/ui';
+import DesignDropzone from '~/design_management_legacy/components/upload/design_dropzone.vue';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
@@ -13,16 +12,10 @@ describe('Design management dropzone component', () => {
};
const findDropzoneCard = () => wrapper.find('.design-dropzone-card');
- const findDropzoneArea = () => wrapper.find('[data-testid="dropzone-area"]');
- const findIcon = () => wrapper.find(GlIcon);
- function createComponent({ slots = {}, data = {}, props = {} } = {}) {
+ function createComponent({ slots = {}, data = {} } = {}) {
wrapper = shallowMount(DesignDropzone, {
slots,
- propsData: {
- hasDesigns: true,
- ...props,
- },
data() {
return data;
},
@@ -136,16 +129,4 @@ describe('Design management dropzone component', () => {
});
});
});
-
- it('applies correct classes when there are no designs or no design saving loader', () => {
- createComponent({ props: { hasDesigns: false } });
- expect(findDropzoneArea().classes()).not.toContain('gl-flex-direction-column');
- expect(findIcon().classes()).toEqual(['gl-mr-4']);
- });
-
- it('applies correct classes when there are designs or design saving loader', () => {
- createComponent({ props: { hasDesigns: true } });
- expect(findDropzoneArea().classes()).toContain('gl-flex-direction-column');
- expect(findIcon().classes()).toEqual(['gl-mb-2']);
- });
});
diff --git a/spec/frontend/design_management_new/components/upload/design_version_dropdown_spec.js b/spec/frontend/design_management_legacy/components/upload/design_version_dropdown_spec.js
index 74e7f3f88fc..7fb85f357c7 100644
--- a/spec/frontend/design_management_new/components/upload/design_version_dropdown_spec.js
+++ b/spec/frontend/design_management_legacy/components/upload/design_version_dropdown_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import DesignVersionDropdown from '~/design_management_new/components/upload/design_version_dropdown.vue';
-import { GlNewDropdown, GlNewDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
+import DesignVersionDropdown from '~/design_management_legacy/components/upload/design_version_dropdown.vue';
import mockAllVersions from './mock_data/all_versions';
const LATEST_VERSION_ID = 3;
@@ -75,7 +75,9 @@ describe('Design management design version dropdown component', () => {
createComponent();
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlNewDropdown).attributes('text')).toBe('Showing Latest Version');
+ expect(wrapper.find(GlDeprecatedDropdown).attributes('text')).toBe(
+ 'Showing Latest Version',
+ );
});
});
@@ -83,7 +85,9 @@ describe('Design management design version dropdown component', () => {
createComponent({ maxVersions: 1 });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlNewDropdown).attributes('text')).toBe('Showing Latest Version');
+ expect(wrapper.find(GlDeprecatedDropdown).attributes('text')).toBe(
+ 'Showing Latest Version',
+ );
});
});
@@ -91,7 +95,7 @@ describe('Design management design version dropdown component', () => {
createComponent({ $route: designRouteFactory(PREVIOUS_VERSION_ID) });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlNewDropdown).attributes('text')).toBe(`Showing Version #1`);
+ expect(wrapper.find(GlDeprecatedDropdown).attributes('text')).toBe(`Showing Version #1`);
});
});
@@ -99,7 +103,9 @@ describe('Design management design version dropdown component', () => {
createComponent({ $route: designRouteFactory(LATEST_VERSION_ID) });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(GlNewDropdown).attributes('text')).toBe('Showing Latest Version');
+ expect(wrapper.find(GlDeprecatedDropdown).attributes('text')).toBe(
+ 'Showing Latest Version',
+ );
});
});
@@ -107,7 +113,9 @@ describe('Design management design version dropdown component', () => {
createComponent();
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.findAll(GlNewDropdownItem)).toHaveLength(wrapper.vm.allVersions.length);
+ expect(wrapper.findAll(GlDeprecatedDropdownItem)).toHaveLength(
+ wrapper.vm.allVersions.length,
+ );
});
});
});
diff --git a/spec/frontend/design_management_new/components/upload/mock_data/all_versions.js b/spec/frontend/design_management_legacy/components/upload/mock_data/all_versions.js
index e76bbd261bd..e76bbd261bd 100644
--- a/spec/frontend/design_management_new/components/upload/mock_data/all_versions.js
+++ b/spec/frontend/design_management_legacy/components/upload/mock_data/all_versions.js
diff --git a/spec/frontend/design_management_new/mock_data/all_versions.js b/spec/frontend/design_management_legacy/mock_data/all_versions.js
index c389fdb8747..c389fdb8747 100644
--- a/spec/frontend/design_management_new/mock_data/all_versions.js
+++ b/spec/frontend/design_management_legacy/mock_data/all_versions.js
diff --git a/spec/frontend/design_management_new/mock_data/design.js b/spec/frontend/design_management_legacy/mock_data/design.js
index 675198b9408..675198b9408 100644
--- a/spec/frontend/design_management_new/mock_data/design.js
+++ b/spec/frontend/design_management_legacy/mock_data/design.js
diff --git a/spec/frontend/design_management_new/mock_data/designs.js b/spec/frontend/design_management_legacy/mock_data/designs.js
index 07f5c1b7457..07f5c1b7457 100644
--- a/spec/frontend/design_management_new/mock_data/designs.js
+++ b/spec/frontend/design_management_legacy/mock_data/designs.js
diff --git a/spec/frontend/design_management_new/mock_data/no_designs.js b/spec/frontend/design_management_legacy/mock_data/no_designs.js
index 9db0ffcade2..9db0ffcade2 100644
--- a/spec/frontend/design_management_new/mock_data/no_designs.js
+++ b/spec/frontend/design_management_legacy/mock_data/no_designs.js
diff --git a/spec/frontend/design_management_new/mock_data/notes.js b/spec/frontend/design_management_legacy/mock_data/notes.js
index 80cb3944786..80cb3944786 100644
--- a/spec/frontend/design_management_new/mock_data/notes.js
+++ b/spec/frontend/design_management_legacy/mock_data/notes.js
diff --git a/spec/frontend/design_management_new/pages/__snapshots__/index_spec.js.snap b/spec/frontend/design_management_legacy/pages/__snapshots__/index_spec.js.snap
index 902803b0ad1..3ba63fd14f0 100644
--- a/spec/frontend/design_management_new/pages/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management_legacy/pages/__snapshots__/index_spec.js.snap
@@ -1,10 +1,7 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Design management index page designs does not render toolbar when there is no permission 1`] = `
-<div
- class="gl-mt-5 designs-root"
- data-testid="designs-root"
->
+<div>
<!---->
<div
@@ -13,24 +10,18 @@ exports[`Design management index page designs does not render toolbar when there
<ol
class="list-unstyled row"
>
- <!---->
-
<li
- class="gl-flex-direction-column col-md-6 col-lg-3 gl-mb-3"
- data-testid="design-dropzone-wrapper"
+ class="col-md-6 col-lg-4 mb-3"
>
<design-dropzone-stub
- class="design-list-item design-list-item-new"
- hasdesigns="true"
+ class="design-list-item"
/>
</li>
<li
- class="col-md-6 col-lg-3 gl-mb-3"
+ class="col-md-6 col-lg-4 mb-3"
>
- <design-dropzone-stub
- hasdesigns="true"
- >
+ <design-dropzone-stub>
<design-stub
event="NONE"
filename="design-1-name"
@@ -43,11 +34,9 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
</li>
<li
- class="col-md-6 col-lg-3 gl-mb-3"
+ class="col-md-6 col-lg-4 mb-3"
>
- <design-dropzone-stub
- hasdesigns="true"
- >
+ <design-dropzone-stub>
<design-stub
event="NONE"
filename="design-2-name"
@@ -60,11 +49,9 @@ exports[`Design management index page designs does not render toolbar when there
<!---->
</li>
<li
- class="col-md-6 col-lg-3 gl-mb-3"
+ class="col-md-6 col-lg-4 mb-3"
>
- <design-dropzone-stub
- hasdesigns="true"
- >
+ <design-dropzone-stub>
<design-stub
event="NONE"
filename="design-3-name"
@@ -86,45 +73,30 @@ exports[`Design management index page designs does not render toolbar when there
`;
exports[`Design management index page designs renders designs list and header with upload button 1`] = `
-<div
- class="gl-mt-5 designs-root"
- data-testid="designs-root"
->
+<div>
<header
class="row-content-block border-top-0 p-2 d-flex"
>
<div
- class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-w-full"
+ class="d-flex justify-content-between align-items-center w-100"
>
- <div>
- <span
- class="gl-font-weight-bold gl-mr-3"
- >
- Designs
- </span>
-
- <design-version-dropdown-stub />
- </div>
+ <design-version-dropdown-stub />
<div
- class="qa-selector-toolbar gl-display-flex"
+ class="qa-selector-toolbar d-flex"
>
- <gl-button-stub
- category="tertiary"
- class="gl-mr-2 js-select-all"
- icon=""
- size="small"
+ <gl-deprecated-button-stub
+ class="mr-2 js-select-all"
+ size="md"
variant="link"
>
Select all
-
- </gl-button-stub>
+ </gl-deprecated-button-stub>
<div>
<delete-button-stub
- buttonclass="gl-mr-4"
- buttonsize="small"
- buttonvariant="danger"
+ buttonclass="btn-danger btn-inverted mr-2"
+ buttonvariant=""
>
Delete selected
@@ -144,24 +116,18 @@ exports[`Design management index page designs renders designs list and header wi
<ol
class="list-unstyled row"
>
- <!---->
-
<li
- class="gl-flex-direction-column col-md-6 col-lg-3 gl-mb-3"
- data-testid="design-dropzone-wrapper"
+ class="col-md-6 col-lg-4 mb-3"
>
<design-dropzone-stub
- class="design-list-item design-list-item-new"
- hasdesigns="true"
+ class="design-list-item"
/>
</li>
<li
- class="col-md-6 col-lg-3 gl-mb-3"
+ class="col-md-6 col-lg-4 mb-3"
>
- <design-dropzone-stub
- hasdesigns="true"
- >
+ <design-dropzone-stub>
<design-stub
event="NONE"
filename="design-1-name"
@@ -177,11 +143,9 @@ exports[`Design management index page designs renders designs list and header wi
/>
</li>
<li
- class="col-md-6 col-lg-3 gl-mb-3"
+ class="col-md-6 col-lg-4 mb-3"
>
- <design-dropzone-stub
- hasdesigns="true"
- >
+ <design-dropzone-stub>
<design-stub
event="NONE"
filename="design-2-name"
@@ -197,11 +161,9 @@ exports[`Design management index page designs renders designs list and header wi
/>
</li>
<li
- class="col-md-6 col-lg-3 gl-mb-3"
+ class="col-md-6 col-lg-4 mb-3"
>
- <design-dropzone-stub
- hasdesigns="true"
- >
+ <design-dropzone-stub>
<design-stub
event="NONE"
filename="design-3-name"
@@ -226,10 +188,7 @@ exports[`Design management index page designs renders designs list and header wi
`;
exports[`Design management index page designs renders error 1`] = `
-<div
- class="gl-mt-5"
- data-testid="designs-root"
->
+<div>
<!---->
<div
@@ -257,10 +216,7 @@ exports[`Design management index page designs renders error 1`] = `
`;
exports[`Design management index page designs renders loading icon 1`] = `
-<div
- class="gl-mt-5"
- data-testid="designs-root"
->
+<div>
<!---->
<div
@@ -279,11 +235,8 @@ exports[`Design management index page designs renders loading icon 1`] = `
</div>
`;
-exports[`Design management index page when has no designs renders design dropzone 1`] = `
-<div
- class="gl-mt-5"
- data-testid="designs-root"
->
+exports[`Design management index page when has no designs renders empty text 1`] = `
+<div>
<!---->
<div
@@ -292,18 +245,11 @@ exports[`Design management index page when has no designs renders design dropzon
<ol
class="list-unstyled row"
>
- <span
- class="gl-font-weight-bold gl-font-weight-bold gl-ml-5 gl-mb-4"
- >
- Designs
- </span>
-
<li
- class="col-12"
- data-testid="design-dropzone-wrapper"
+ class="col-md-6 col-lg-4 mb-3"
>
<design-dropzone-stub
- class=""
+ class="design-list-item"
/>
</li>
diff --git a/spec/frontend/design_management_new/pages/design/__snapshots__/index_spec.js.snap b/spec/frontend/design_management_legacy/pages/design/__snapshots__/index_spec.js.snap
index 83bcebd513e..dc5baf37fc6 100644
--- a/spec/frontend/design_management_new/pages/design/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management_legacy/pages/design/__snapshots__/index_spec.js.snap
@@ -10,7 +10,7 @@ exports[`Design management design index page renders design index 1`] = `
<design-destroyer-stub
filenames="test.jpg"
iid="1"
- project-path="project-path"
+ projectpath=""
/>
<!---->
@@ -41,7 +41,7 @@ exports[`Design management design index page renders design index 1`] = `
</h2>
<a
- class="gl-text-gray-600 gl-text-decoration-none gl-mb-6 gl-display-block"
+ class="gl-text-gray-400 gl-text-decoration-none gl-mb-6 gl-display-block"
href="full-issue-url"
>
ull-issue-path
@@ -60,13 +60,13 @@ exports[`Design management design index page renders design index 1`] = `
designid="test"
discussion="[object Object]"
discussionwithopenform=""
- markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
+ markdownpreviewpath="//preview_markdown?target_type=Issue"
noteableid="design-id"
/>
<gl-button-stub
- category="tertiary"
- class="link-inherit-color gl-text-black-normal gl-text-decoration-none gl-font-weight-bold gl-mb-4"
+ category="primary"
+ class="link-inherit-color gl-text-body gl-text-decoration-none gl-font-weight-bold gl-mb-4"
data-testid="resolved-comments"
icon="chevron-right"
id="resolved-comments"
@@ -108,7 +108,7 @@ exports[`Design management design index page renders design index 1`] = `
designid="test"
discussion="[object Object]"
discussionwithopenform=""
- markdownpreviewpath="/project-path/preview_markdown?target_type=Issue"
+ markdownpreviewpath="//preview_markdown?target_type=Issue"
noteableid="design-id"
/>
</gl-collapse-stub>
@@ -140,7 +140,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
<design-destroyer-stub
filenames="test.jpg"
iid="1"
- project-path="project-path"
+ projectpath=""
/>
<div
@@ -188,7 +188,7 @@ exports[`Design management design index page with error GlAlert is rendered in c
</h2>
<a
- class="gl-text-gray-600 gl-text-decoration-none gl-mb-6 gl-display-block"
+ class="gl-text-gray-400 gl-text-decoration-none gl-mb-6 gl-display-block"
href="full-issue-url"
>
ull-issue-path
diff --git a/spec/frontend/design_management_new/pages/design/index_spec.js b/spec/frontend/design_management_legacy/pages/design/index_spec.js
index 3822b0b3b71..5eb4158c715 100644
--- a/spec/frontend/design_management_new/pages/design/index_spec.js
+++ b/spec/frontend/design_management_legacy/pages/design/index_spec.js
@@ -2,11 +2,11 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueRouter from 'vue-router';
import { GlAlert } from '@gitlab/ui';
import { ApolloMutation } from 'vue-apollo';
-import createFlash from '~/flash';
-import DesignIndex from '~/design_management_new/pages/design/index.vue';
-import DesignSidebar from '~/design_management_new/components/design_sidebar.vue';
-import DesignPresentation from '~/design_management_new/components/design_presentation.vue';
-import createImageDiffNoteMutation from '~/design_management_new/graphql/mutations/create_image_diff_note.mutation.graphql';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import DesignIndex from '~/design_management_legacy/pages/design/index.vue';
+import DesignSidebar from '~/design_management_legacy/components/design_sidebar.vue';
+import DesignPresentation from '~/design_management_legacy/components/design_presentation.vue';
+import createImageDiffNoteMutation from '~/design_management_legacy/graphql/mutations/create_image_diff_note.mutation.graphql';
import design from '../../mock_data/design';
import mockResponseWithDesigns from '../../mock_data/designs';
import mockResponseNoDesigns from '../../mock_data/no_designs';
@@ -14,11 +14,11 @@ import mockAllVersions from '../../mock_data/all_versions';
import {
DESIGN_NOT_FOUND_ERROR,
DESIGN_VERSION_NOT_EXIST_ERROR,
-} from '~/design_management_new/utils/error_messages';
-import { DESIGNS_ROUTE_NAME } from '~/design_management_new/router/constants';
-import createRouter from '~/design_management_new/router';
-import * as utils from '~/design_management_new/utils/design_management_utils';
-import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management_new/constants';
+} from '~/design_management_legacy/utils/error_messages';
+import { DESIGNS_ROUTE_NAME } from '~/design_management_legacy/router/constants';
+import createRouter from '~/design_management_legacy/router';
+import * as utils from '~/design_management_legacy/utils/design_management_utils';
+import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management_legacy/constants';
jest.mock('~/flash');
jest.mock('mousetrap', () => ({
@@ -95,12 +95,9 @@ describe('Design management design index page', () => {
DesignSidebar,
DesignReplyForm,
},
- provide: {
- issueIid: '1',
- projectPath: 'project-path',
- },
data() {
return {
+ issueIid: '1',
activeDiscussion: {
id: null,
source: null,
@@ -152,7 +149,7 @@ describe('Design management design index page', () => {
expect(findSidebar().props()).toEqual({
design,
- markdownPreviewPath: '/project-path/preview_markdown?target_type=Issue',
+ markdownPreviewPath: '//preview_markdown?target_type=Issue',
resolvedDiscussionsExpanded: false,
});
});
diff --git a/spec/frontend/design_management_new/pages/index_spec.js b/spec/frontend/design_management_legacy/pages/index_spec.js
index 40a462eabb8..5b7512aab7b 100644
--- a/spec/frontend/design_management_new/pages/index_spec.js
+++ b/spec/frontend/design_management_legacy/pages/index_spec.js
@@ -2,20 +2,20 @@ import { createLocalVue, shallowMount } from '@vue/test-utils';
import { ApolloMutation } from 'vue-apollo';
import VueRouter from 'vue-router';
import { GlEmptyState } from '@gitlab/ui';
-import Index from '~/design_management_new/pages/index.vue';
-import uploadDesignQuery from '~/design_management_new/graphql/mutations/upload_design.mutation.graphql';
-import DesignDestroyer from '~/design_management_new/components/design_destroyer.vue';
-import DesignDropzone from '~/design_management_new/components/upload/design_dropzone.vue';
-import DeleteButton from '~/design_management_new/components/delete_button.vue';
-import { DESIGNS_ROUTE_NAME } from '~/design_management_new/router/constants';
+import Index from '~/design_management_legacy/pages/index.vue';
+import uploadDesignQuery from '~/design_management_legacy/graphql/mutations/upload_design.mutation.graphql';
+import DesignDestroyer from '~/design_management_legacy/components/design_destroyer.vue';
+import DesignDropzone from '~/design_management_legacy/components/upload/design_dropzone.vue';
+import DeleteButton from '~/design_management_legacy/components/delete_button.vue';
+import { DESIGNS_ROUTE_NAME } from '~/design_management_legacy/router/constants';
import {
EXISTING_DESIGN_DROP_MANY_FILES_MESSAGE,
EXISTING_DESIGN_DROP_INVALID_FILENAME_MESSAGE,
-} from '~/design_management_new/utils/error_messages';
-import createFlash from '~/flash';
-import createRouter from '~/design_management_new/router';
-import * as utils from '~/design_management_new/utils/design_management_utils';
-import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management_new/constants';
+} from '~/design_management_legacy/utils/error_messages';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import createRouter from '~/design_management_legacy/router';
+import * as utils from '~/design_management_legacy/utils/design_management_utils';
+import { DESIGN_DETAIL_LAYOUT_CLASSLIST } from '~/design_management_legacy/constants';
jest.mock('~/flash.js');
const mockPageEl = {
@@ -68,8 +68,6 @@ describe('Design management index page', () => {
const findToolbar = () => wrapper.find('.qa-selector-toolbar');
const findDeleteButton = () => wrapper.find(DeleteButton);
const findDropzone = () => wrapper.findAll(DesignDropzone).at(0);
- const dropzoneClasses = () => findDropzone().classes();
- const findDropzoneWrapper = () => wrapper.find('[data-testid="design-dropzone-wrapper"]');
const findFirstDropzoneWithDesign = () => wrapper.findAll(DesignDropzone).at(1);
function createComponent({
@@ -94,23 +92,19 @@ describe('Design management index page', () => {
};
wrapper = shallowMount(Index, {
- data() {
- return {
- designs,
- allVersions,
- permissions: {
- createDesign,
- },
- };
- },
mocks: { $apollo },
localVue,
router,
stubs: { DesignDestroyer, ApolloMutation, ...stubs },
attachToDocument: true,
- provide: {
- projectPath: 'project-path',
- issueIid: '1',
+ });
+
+ wrapper.setData({
+ designs,
+ allVersions,
+ issueIid: '1',
+ permissions: {
+ createDesign,
},
});
}
@@ -123,7 +117,9 @@ describe('Design management index page', () => {
it('renders loading icon', () => {
createComponent({ loading: true });
- expect(wrapper.element).toMatchSnapshot();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
});
it('renders error', () => {
@@ -139,35 +135,25 @@ describe('Design management index page', () => {
it('renders a toolbar with buttons when there are designs', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
- expect(findToolbar().exists()).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findToolbar().exists()).toBe(true);
+ });
});
it('renders designs list and header with upload button', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
- expect(wrapper.element).toMatchSnapshot();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
});
it('does not render toolbar when there is no permission', () => {
createComponent({ designs: mockDesigns, allVersions: [mockVersion], createDesign: false });
- expect(wrapper.element).toMatchSnapshot();
- });
-
- it('has correct classes applied to design dropzone', () => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
- expect(dropzoneClasses()).toContain('design-list-item');
- expect(dropzoneClasses()).toContain('design-list-item-new');
- });
-
- it('has correct classes applied to dropzone wrapper', () => {
- createComponent({ designs: mockDesigns, allVersions: [mockVersion] });
- expect(findDropzoneWrapper().classes()).toEqual([
- 'gl-flex-direction-column',
- 'col-md-6',
- 'col-lg-3',
- 'gl-mb-3',
- ]);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
});
});
@@ -176,20 +162,11 @@ describe('Design management index page', () => {
createComponent();
});
- it('renders design dropzone', () =>
+ it('renders empty text', () =>
wrapper.vm.$nextTick().then(() => {
expect(wrapper.element).toMatchSnapshot();
}));
- it('has correct classes applied to design dropzone', () => {
- expect(dropzoneClasses()).not.toContain('design-list-item');
- expect(dropzoneClasses()).not.toContain('design-list-item-new');
- });
-
- it('has correct classes applied to dropzone wrapper', () => {
- expect(findDropzoneWrapper().classes()).toEqual(['col-12']);
- });
-
it('does not render a toolbar with buttons', () =>
wrapper.vm.$nextTick().then(() => {
expect(findToolbar().exists()).toBe(false);
@@ -208,7 +185,7 @@ describe('Design management index page', () => {
mutation: uploadDesignQuery,
variables: {
files: [{ name: 'test' }],
- projectPath: 'project-path',
+ projectPath: '',
iid: '1',
},
optimisticResponse: {
@@ -254,18 +231,12 @@ describe('Design management index page', () => {
},
};
- return wrapper.vm
- .$nextTick()
- .then(() => {
- findDropzone().vm.$emit('change', [{ name: 'test' }]);
- expect(mutate).toHaveBeenCalledWith(mutationVariables);
- expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
- expect(wrapper.vm.isSaving).toBeTruthy();
- })
- .then(() => {
- expect(dropzoneClasses()).toContain('design-list-item');
- expect(dropzoneClasses()).toContain('design-list-item-new');
- });
+ return wrapper.vm.$nextTick().then(() => {
+ findDropzone().vm.$emit('change', [{ name: 'test' }]);
+ expect(mutate).toHaveBeenCalledWith(mutationVariables);
+ expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
+ expect(wrapper.vm.isSaving).toBeTruthy();
+ });
});
it('sets isSaving', () => {
@@ -413,7 +384,8 @@ describe('Design management index page', () => {
it('renders toolbar buttons', () => {
expect(findToolbar().exists()).toBe(true);
- expect(findToolbar().isVisible()).toBe(true);
+ expect(findToolbar().classes()).toContain('d-flex');
+ expect(findToolbar().classes()).not.toContain('d-none');
});
it('adds two designs to selected designs when their checkboxes are checked', () => {
@@ -470,9 +442,9 @@ describe('Design management index page', () => {
});
});
- it('on latest version when has no designs toolbar buttons are invisible', () => {
+ it('does not render toolbar buttons on latest version when there are no designs', () => {
createComponent({ designs: [], allVersions: [mockVersion] });
- expect(findToolbar().isVisible()).toBe(false);
+ expect(findToolbar().exists()).toBe(false);
});
describe('on non-latest version', () => {
@@ -563,7 +535,7 @@ describe('Design management index page', () => {
it('ensures fullscreen layout is not applied', () => {
createComponent(true);
- wrapper.vm.$router.push('/');
+ wrapper.vm.$router.push('/designs');
expect(mockPageEl.classList.remove).toHaveBeenCalledTimes(1);
expect(mockPageEl.classList.remove).toHaveBeenCalledWith(...DESIGN_DETAIL_LAYOUT_CLASSLIST);
});
diff --git a/spec/frontend/design_management_new/router_spec.js b/spec/frontend/design_management_legacy/router_spec.js
index 4d63e622724..5f62793a243 100644
--- a/spec/frontend/design_management_new/router_spec.js
+++ b/spec/frontend/design_management_legacy/router_spec.js
@@ -1,11 +1,15 @@
import { mount, createLocalVue } from '@vue/test-utils';
import { nextTick } from 'vue';
import VueRouter from 'vue-router';
-import App from '~/design_management_new/components/app.vue';
-import Designs from '~/design_management_new/pages/index.vue';
-import DesignDetail from '~/design_management_new/pages/design/index.vue';
-import createRouter from '~/design_management_new/router';
-import { DESIGNS_ROUTE_NAME, DESIGN_ROUTE_NAME } from '~/design_management_new/router/constants';
+import App from '~/design_management_legacy/components/app.vue';
+import Designs from '~/design_management_legacy/pages/index.vue';
+import DesignDetail from '~/design_management_legacy/pages/design/index.vue';
+import createRouter from '~/design_management_legacy/router';
+import {
+ ROOT_ROUTE_NAME,
+ DESIGNS_ROUTE_NAME,
+ DESIGN_ROUTE_NAME,
+} from '~/design_management_legacy/router/constants';
import '~/commons/bootstrap';
function factory(routeArg) {
@@ -45,7 +49,7 @@ describe('Design management router', () => {
window.location.hash = '';
});
- describe.each([['/'], [{ name: DESIGNS_ROUTE_NAME }]])('root route', routeArg => {
+ describe.each([['/'], [{ name: ROOT_ROUTE_NAME }]])('root route', routeArg => {
it('pushes home component', () => {
const wrapper = factory(routeArg);
@@ -53,6 +57,14 @@ describe('Design management router', () => {
});
});
+ describe.each([['/designs'], [{ name: DESIGNS_ROUTE_NAME }]])('designs route', routeArg => {
+ it('pushes designs root component', () => {
+ const wrapper = factory(routeArg);
+
+ expect(wrapper.find(Designs).exists()).toBe(true);
+ });
+ });
+
describe.each([['/designs/1'], [{ name: DESIGN_ROUTE_NAME, params: { id: '1' } }]])(
'designs detail route',
routeArg => {
diff --git a/spec/frontend/design_management_new/utils/cache_update_spec.js b/spec/frontend/design_management_legacy/utils/cache_update_spec.js
index 611716d5aa7..dce91b5e59b 100644
--- a/spec/frontend/design_management_new/utils/cache_update_spec.js
+++ b/spec/frontend/design_management_legacy/utils/cache_update_spec.js
@@ -5,15 +5,15 @@ import {
updateStoreAfterAddImageDiffNote,
updateStoreAfterUploadDesign,
updateStoreAfterUpdateImageDiffNote,
-} from '~/design_management_new/utils/cache_update';
+} from '~/design_management_legacy/utils/cache_update';
import {
designDeletionError,
ADD_DISCUSSION_COMMENT_ERROR,
ADD_IMAGE_DIFF_NOTE_ERROR,
UPDATE_IMAGE_DIFF_NOTE_ERROR,
-} from '~/design_management_new/utils/error_messages';
+} from '~/design_management_legacy/utils/error_messages';
import design from '../mock_data/design';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash.js');
diff --git a/spec/frontend/design_management_new/utils/design_management_utils_spec.js b/spec/frontend/design_management_legacy/utils/design_management_utils_spec.js
index 8bc33e214be..97e85a24a35 100644
--- a/spec/frontend/design_management_new/utils/design_management_utils_spec.js
+++ b/spec/frontend/design_management_legacy/utils/design_management_utils_spec.js
@@ -6,7 +6,7 @@ import {
updateImageDiffNoteOptimisticResponse,
isValidDesignFile,
extractDesign,
-} from '~/design_management_new/utils/design_management_utils';
+} from '~/design_management_legacy/utils/design_management_utils';
import mockResponseNoDesigns from '../mock_data/no_designs';
import mockResponseWithDesigns from '../mock_data/designs';
import mockDesign from '../mock_data/design';
diff --git a/spec/frontend/design_management_new/utils/error_messages_spec.js b/spec/frontend/design_management_legacy/utils/error_messages_spec.js
index eb5dc0fad20..489ac23da4e 100644
--- a/spec/frontend/design_management_new/utils/error_messages_spec.js
+++ b/spec/frontend/design_management_legacy/utils/error_messages_spec.js
@@ -1,7 +1,7 @@
import {
designDeletionError,
designUploadSkippedWarning,
-} from '~/design_management_new/utils/error_messages';
+} from '~/design_management_legacy/utils/error_messages';
const mockFilenames = n =>
Array(n)
@@ -55,7 +55,7 @@ describe('Error message', () => {
'Upload skipped. Some of the designs you tried uploading did not change: 1.jpg, 2.jpg, 3.jpg, 4.jpg, 5.jpg, and 2 more.',
],
])('designUploadSkippedWarning', (uploadedFiles, skippedFiles, expected) => {
- test('returns expected warning message', () => {
+ it('returns expected warning message', () => {
expect(designUploadSkippedWarning(uploadedFiles, skippedFiles)).toBe(expected);
});
});
diff --git a/spec/frontend/design_management_new/utils/tracking_spec.js b/spec/frontend/design_management_legacy/utils/tracking_spec.js
index ac7267642cb..a59cf80c906 100644
--- a/spec/frontend/design_management_new/utils/tracking_spec.js
+++ b/spec/frontend/design_management_legacy/utils/tracking_spec.js
@@ -1,5 +1,5 @@
import { mockTracking } from 'helpers/tracking_helper';
-import { trackDesignDetailView } from '~/design_management_new/utils/tracking';
+import { trackDesignDetailView } from '~/design_management_legacy/utils/tracking';
function getTrackingSpy(key) {
return mockTracking(key, undefined, jest.spyOn);
diff --git a/spec/frontend/design_management_new/components/list/__snapshots__/item_spec.js.snap b/spec/frontend/design_management_new/components/list/__snapshots__/item_spec.js.snap
deleted file mode 100644
index 8c6e20cb54c..00000000000
--- a/spec/frontend/design_management_new/components/list/__snapshots__/item_spec.js.snap
+++ /dev/null
@@ -1,472 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Design management list item component when item appears in view after image is loaded renders media broken icon when image onerror triggered 1`] = `
-<gl-icon-stub
- class="text-secondary"
- name="media-broken"
- size="32"
-/>
-`;
-
-exports[`Design management list item component with no notes renders item with correct status icon for creation event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <div
- class="design-event position-absolute"
- >
- <span
- aria-label="Added in this version"
- title="Added in this version"
- >
- <icon-stub
- class="text-success-500"
- name="file-addition-solid"
- size="18"
- />
- </span>
- </div>
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders item with correct status icon for deletion event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <div
- class="design-event position-absolute"
- >
- <span
- aria-label="Deleted in this version"
- title="Deleted in this version"
- >
- <icon-stub
- class="text-danger-500"
- name="file-deletion-solid"
- size="18"
- />
- </span>
- </div>
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders item with correct status icon for modification event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <div
- class="design-event position-absolute"
- >
- <span
- aria-label="Modified in this version"
- title="Modified in this version"
- >
- <icon-stub
- class="text-primary-500"
- name="file-modified-solid"
- size="18"
- />
- </span>
- </div>
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders item with no status icon for none event 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <!---->
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with no notes renders loading spinner when isUploading is true 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <!---->
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <gl-loading-icon-stub
- color="orange"
- label="Loading"
- size="md"
- />
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- style="display: none;"
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <!---->
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with notes renders item with multiple comments 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <!---->
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <div
- class="ml-auto d-flex align-items-center text-secondary"
- >
- <icon-stub
- class="ml-1"
- name="comments"
- size="16"
- />
-
- <span
- aria-label="2 comments"
- class="ml-1"
- >
-
- 2
-
- </span>
- </div>
- </div>
-</router-link-stub>
-`;
-
-exports[`Design management list item component with notes renders item with single comment 1`] = `
-<router-link-stub
- class="card cursor-pointer text-plain js-design-list-item design-list-item design-list-item-new"
- to="[object Object]"
->
- <div
- class="card-body p-0 d-flex-center overflow-hidden position-relative"
- >
- <!---->
-
- <gl-intersection-observer-stub
- options="[object Object]"
- >
- <!---->
-
- <img
- alt="test"
- class="block mx-auto mw-100 mh-100 design-img"
- data-qa-selector="design_image"
- src=""
- />
- </gl-intersection-observer-stub>
- </div>
-
- <div
- class="card-footer d-flex w-100"
- >
- <div
- class="d-flex flex-column str-truncated-100"
- >
- <span
- class="bold str-truncated-100"
- data-qa-selector="design_file_name"
- >
- test
- </span>
-
- <span
- class="str-truncated-100"
- >
-
- Updated
- <timeago-stub
- cssclass=""
- time="01-01-2019"
- tooltipplacement="bottom"
- />
- </span>
- </div>
-
- <div
- class="ml-auto d-flex align-items-center text-secondary"
- >
- <icon-stub
- class="ml-1"
- name="comments"
- size="16"
- />
-
- <span
- aria-label="1 comment"
- class="ml-1"
- >
-
- 1
-
- </span>
- </div>
- </div>
-</router-link-stub>
-`;
diff --git a/spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_button_spec.js.snap b/spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_button_spec.js.snap
deleted file mode 100644
index 08662a04f15..00000000000
--- a/spec/frontend/design_management_new/components/toolbar/__snapshots__/pagination_button_spec.js.snap
+++ /dev/null
@@ -1,28 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Design management pagination button component disables button when no design is passed 1`] = `
-<router-link-stub
- aria-label="Test title"
- class="btn btn-default disabled"
- disabled="true"
- to="[object Object]"
->
- <icon-stub
- name="angle-right"
- size="16"
- />
-</router-link-stub>
-`;
-
-exports[`Design management pagination button component renders router-link 1`] = `
-<router-link-stub
- aria-label="Test title"
- class="btn btn-default"
- to="[object Object]"
->
- <icon-stub
- name="angle-right"
- size="16"
- />
-</router-link-stub>
-`;
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index b7f03f35dfb..ac046ddc203 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -41,6 +41,7 @@ describe('diffs/components/app', () => {
store = createDiffsStore();
store.state.diffs.isLoading = false;
+ store.state.diffs.isTreeLoaded = true;
extendStore(store);
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index 7f69a6344c1..7fdbc791589 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -30,7 +30,7 @@ describe('CompareVersions', () => {
store,
propsData: {
mergeRequestDiffs: diffsMockData,
- diffFilesLength: 0,
+ diffFilesCountText: null,
...props,
},
});
diff --git a/spec/frontend/diffs/components/diff_expansion_cell_spec.js b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
index ef2e0dfe59b..b8aca4ad86b 100644
--- a/spec/frontend/diffs/components/diff_expansion_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_expansion_cell_spec.js
@@ -1,12 +1,12 @@
import Vue from 'vue';
import { cloneDeep } from 'lodash';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { getByText } from '@testing-library/dom';
import { createStore } from '~/mr_notes/stores';
import DiffExpansionCell from '~/diffs/components/diff_expansion_cell.vue';
import { getPreviousLineIndex } from '~/diffs/store/utils';
import { INLINE_DIFF_VIEW_TYPE, PARALLEL_DIFF_VIEW_TYPE } from '~/diffs/constants';
import diffFileMockData from '../mock_data/diff_file';
-import { getByText } from '@testing-library/dom';
const EXPAND_UP_CLASS = '.js-unfold';
const EXPAND_DOWN_CLASS = '.js-unfold-down';
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 7e154d76f45..ead8bd79cdb 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
-import { createStore } from '~/mr_notes/stores';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import { createStore } from '~/mr_notes/stores';
import DiffFileComponent from '~/diffs/components/diff_file.vue';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
import diffFileMockDataReadable from '../mock_data/diff_file';
@@ -128,6 +128,26 @@ describe('DiffFile', () => {
});
});
+ it('should auto-expand collapsed files when viewDiffsFileByFile is true', done => {
+ vm.$destroy();
+ window.gon = {
+ features: { autoExpandCollapsedDiffs: true },
+ };
+ vm = createComponentWithStore(Vue.extend(DiffFileComponent), createStore(), {
+ file: JSON.parse(JSON.stringify(diffFileMockDataUnreadable)),
+ canCurrentUserFork: false,
+ viewDiffsFileByFile: true,
+ }).$mount();
+
+ vm.$nextTick(() => {
+ expect(vm.$el.innerText).not.toContain('This diff is collapsed');
+
+ window.gon = {};
+
+ done();
+ });
+ });
+
it('should be collapsed for renamed files', done => {
vm.renderIt = true;
vm.isCollapsed = false;
diff --git a/spec/frontend/diffs/components/diff_stats_spec.js b/spec/frontend/diffs/components/diff_stats_spec.js
index 5956b478019..7a083fb6bde 100644
--- a/spec/frontend/diffs/components/diff_stats_spec.js
+++ b/spec/frontend/diffs/components/diff_stats_spec.js
@@ -2,53 +2,97 @@ import { shallowMount } from '@vue/test-utils';
import DiffStats from '~/diffs/components/diff_stats.vue';
import Icon from '~/vue_shared/components/icon.vue';
+const TEST_ADDED_LINES = 100;
+const TEST_REMOVED_LINES = 200;
+const DIFF_FILES_COUNT = '300';
+const DIFF_FILES_COUNT_TRUNCATED = '300+';
+
describe('diff_stats', () => {
- it('does not render a group if diffFileLengths is empty', () => {
- const wrapper = shallowMount(DiffStats, {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(DiffStats, {
propsData: {
- addedLines: 1,
- removedLines: 2,
+ addedLines: TEST_ADDED_LINES,
+ removedLines: TEST_REMOVED_LINES,
+ ...props,
},
});
- const groups = wrapper.findAll('.diff-stats-group');
+ };
- expect(groups.length).toBe(2);
- });
+ describe('diff stats group', () => {
+ const findDiffStatsGroup = () => wrapper.findAll('.diff-stats-group');
- it('does not render a group if diffFileLengths is not a number', () => {
- const wrapper = shallowMount(DiffStats, {
- propsData: {
- addedLines: 1,
- removedLines: 2,
- diffFilesLength: Number.NaN,
- },
+ it('is not rendered if diffFilesCountText is empty', () => {
+ createComponent();
+
+ expect(findDiffStatsGroup().length).toBe(2);
});
- const groups = wrapper.findAll('.diff-stats-group');
- expect(groups.length).toBe(2);
- });
+ it('is not rendered if diffFilesCountText is not a number', () => {
+ createComponent({
+ diffFilesCountText: null,
+ });
- it('shows amount of files changed, lines added and lines removed when passed all props', () => {
- const wrapper = shallowMount(DiffStats, {
- propsData: {
- addedLines: 100,
- removedLines: 200,
- diffFilesLength: 300,
- },
+ expect(findDiffStatsGroup().length).toBe(2);
});
+ });
+ describe('line changes', () => {
const findFileLine = name => wrapper.find(name);
+
+ it('shows the amount of lines added', () => {
+ expect(findFileLine('.js-file-addition-line').text()).toBe(TEST_ADDED_LINES.toString());
+ });
+
+ it('shows the amount of lines removed', () => {
+ expect(findFileLine('.js-file-deletion-line').text()).toBe(TEST_REMOVED_LINES.toString());
+ });
+ });
+
+ describe('files changes', () => {
const findIcon = name =>
wrapper
.findAll(Icon)
.filter(c => c.attributes('name') === name)
.at(0).element.parentNode;
- const additions = findFileLine('.js-file-addition-line');
- const deletions = findFileLine('.js-file-deletion-line');
- const filesChanged = findIcon('doc-code');
- expect(additions.text()).toBe('100');
- expect(deletions.text()).toBe('200');
- expect(filesChanged.textContent).toContain('300');
+ it('shows amount of files changed with plural "files" when 0 files have changed', () => {
+ const noFilesChanged = '0';
+
+ createComponent({
+ diffFilesCountText: noFilesChanged,
+ });
+
+ expect(findIcon('doc-code').textContent.trim()).toBe(`${noFilesChanged} files`);
+ });
+
+ it('shows amount of files changed with singular "file" when 1 file is changed', () => {
+ const oneFileChanged = '1';
+
+ createComponent({
+ diffFilesCountText: oneFileChanged,
+ });
+
+ expect(findIcon('doc-code').textContent.trim()).toBe(`${oneFileChanged} file`);
+ });
+
+ it('shows amount of files changed with plural "files" when multiple files are changed', () => {
+ createComponent({
+ diffFilesCountText: DIFF_FILES_COUNT,
+ });
+
+ expect(findIcon('doc-code').textContent.trim()).toContain(`${DIFF_FILES_COUNT} files`);
+ });
+
+ it('shows amount of files changed with plural "files" when the file count is truncated', () => {
+ createComponent({
+ diffFilesCountText: DIFF_FILES_COUNT_TRUNCATED,
+ });
+
+ expect(findIcon('doc-code').textContent.trim()).toContain(
+ `${DIFF_FILES_COUNT_TRUNCATED} files`,
+ );
+ });
});
});
diff --git a/spec/frontend/diffs/components/diff_table_cell_spec.js b/spec/frontend/diffs/components/diff_table_cell_spec.js
index 9693fe68b57..02f5c27eecb 100644
--- a/spec/frontend/diffs/components/diff_table_cell_spec.js
+++ b/spec/frontend/diffs/components/diff_table_cell_spec.js
@@ -1,10 +1,10 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
+import { TEST_HOST } from 'helpers/test_constants';
import DiffTableCell from '~/diffs/components/diff_table_cell.vue';
import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
import { LINE_POSITION_RIGHT } from '~/diffs/constants';
import { createStore } from '~/mr_notes/stores';
-import { TEST_HOST } from 'helpers/test_constants';
import discussionsMockData from '../mock_data/diff_discussions';
import diffFileMockData from '../mock_data/diff_file';
@@ -18,6 +18,12 @@ const TEST_LINE_CODE = 'LC_42';
const TEST_FILE_HASH = diffFileMockData.file_hash;
describe('DiffTableCell', () => {
+ const symlinkishFileTooltip =
+ 'Commenting on symbolic links that replace or are replaced by files is currently not supported.';
+ const realishFileTooltip =
+ 'Commenting on files that replace or are replaced by symbolic links is currently not supported.';
+ const otherFileTooltip = 'Add a comment to this line';
+
let wrapper;
let line;
let store;
@@ -67,6 +73,7 @@ describe('DiffTableCell', () => {
const findTd = () => wrapper.find({ ref: 'td' });
const findNoteButton = () => wrapper.find({ ref: 'addDiffNoteButton' });
const findLineNumber = () => wrapper.find({ ref: 'lineNumberRef' });
+ const findTooltip = () => wrapper.find({ ref: 'addNoteTooltip' });
const findAvatars = () => wrapper.find(DiffGutterAvatars);
describe('td', () => {
@@ -134,6 +141,53 @@ describe('DiffTableCell', () => {
});
},
);
+
+ it.each`
+ disabled | commentsDisabled
+ ${'disabled'} | ${true}
+ ${undefined} | ${false}
+ `(
+ 'has attribute disabled=$disabled when the outer component has prop commentsDisabled=$commentsDisabled',
+ ({ disabled, commentsDisabled }) => {
+ line.commentsDisabled = commentsDisabled;
+
+ createComponent({
+ showCommentButton: true,
+ isHover: true,
+ });
+
+ wrapper.setData({ isCommentButtonRendered: true });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findNoteButton().attributes('disabled')).toBe(disabled);
+ });
+ },
+ );
+
+ it.each`
+ tooltip | commentsDisabled
+ ${symlinkishFileTooltip} | ${{ wasSymbolic: true }}
+ ${symlinkishFileTooltip} | ${{ isSymbolic: true }}
+ ${realishFileTooltip} | ${{ wasReal: true }}
+ ${realishFileTooltip} | ${{ isReal: true }}
+ ${otherFileTooltip} | ${false}
+ `(
+ 'has the correct tooltip when commentsDisabled=$commentsDisabled',
+ ({ tooltip, commentsDisabled }) => {
+ line.commentsDisabled = commentsDisabled;
+
+ createComponent({
+ showCommentButton: true,
+ isHover: true,
+ });
+
+ wrapper.setData({ isCommentButtonRendered: true });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findTooltip().attributes('title')).toBe(tooltip);
+ });
+ },
+ );
});
describe('line number', () => {
diff --git a/spec/frontend/diffs/components/inline_diff_view_spec.js b/spec/frontend/diffs/components/inline_diff_view_spec.js
index eeef8e5a7b0..6c37f86658e 100644
--- a/spec/frontend/diffs/components/inline_diff_view_spec.js
+++ b/spec/frontend/diffs/components/inline_diff_view_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import '~/behaviors/markdown/render_gfm';
-import { createStore } from '~/mr_notes/stores';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { createStore } from '~/mr_notes/stores';
import InlineDiffView from '~/diffs/components/inline_diff_view.vue';
import diffFileMockData from '../mock_data/diff_file';
import discussionsMockData from '../mock_data/diff_discussions';
diff --git a/spec/frontend/diffs/components/no_changes_spec.js b/spec/frontend/diffs/components/no_changes_spec.js
index 2eca97a47fd..2795c68b4ee 100644
--- a/spec/frontend/diffs/components/no_changes_spec.js
+++ b/spec/frontend/diffs/components/no_changes_spec.js
@@ -1,8 +1,8 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
+import { GlButton } from '@gitlab/ui';
import { createStore } from '~/mr_notes/stores';
import NoChanges from '~/diffs/components/no_changes.vue';
-import { GlButton } from '@gitlab/ui';
describe('Diff no changes empty state', () => {
let vm;
diff --git a/spec/frontend/diffs/components/parallel_diff_view_spec.js b/spec/frontend/diffs/components/parallel_diff_view_spec.js
index 30231f0ba71..cb1a47f60d5 100644
--- a/spec/frontend/diffs/components/parallel_diff_view_spec.js
+++ b/spec/frontend/diffs/components/parallel_diff_view_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import { createStore } from '~/mr_notes/stores';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { createStore } from '~/mr_notes/stores';
import ParallelDiffView from '~/diffs/components/parallel_diff_view.vue';
import * as constants from '~/diffs/constants';
import diffFileMockData from '../mock_data/diff_file';
diff --git a/spec/frontend/diffs/components/tree_list_spec.js b/spec/frontend/diffs/components/tree_list_spec.js
index f78c5f25ee7..14cb2a17aec 100644
--- a/spec/frontend/diffs/components/tree_list_spec.js
+++ b/spec/frontend/diffs/components/tree_list_spec.js
@@ -17,6 +17,7 @@ describe('Diffs tree list component', () => {
});
// Setup initial state
+ store.state.diffs.isTreeLoaded = true;
store.state.diffs.diffFiles.push('test');
store.state.diffs = {
addedLines: 10,
diff --git a/spec/frontend/diffs/diff_file_spec.js b/spec/frontend/diffs/diff_file_spec.js
new file mode 100644
index 00000000000..5d74760ef66
--- /dev/null
+++ b/spec/frontend/diffs/diff_file_spec.js
@@ -0,0 +1,60 @@
+import { prepareRawDiffFile } from '~/diffs/diff_file';
+
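+// Git file modes in the fixtures: '120000' marks a symbolic link, '0755' a regular file, and '0' a side that does not exist.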
+const DIFF_FILES = [
+ {
+ file_hash: 'ABC', // This file is just a normal file
+ },
+ {
+ file_hash: 'DEF', // This file replaces a symlink
+ a_mode: '0',
+ b_mode: '0755',
+ },
+ {
+ file_hash: 'DEF', // This symlink is replaced by a file
+ a_mode: '120000',
+ b_mode: '0',
+ },
+ {
+ file_hash: 'GHI', // This symlink replaces a file
+ a_mode: '0',
+ b_mode: '120000',
+ },
+ {
+ file_hash: 'GHI', // This file is replaced by a symlink
+ a_mode: '0755',
+ b_mode: '0',
+ },
+];
+
+function makeBrokenSymlinkObject(replaced, wasSymbolic, isSymbolic, wasReal, isReal) {
+ return {
+ replaced,
+ wasSymbolic,
+ isSymbolic,
+ wasReal,
+ isReal,
+ };
+}
+
+describe('diff_file utilities', () => {
+ describe('prepareRawDiffFile', () => {
+ it.each`
+ fileIndex | description | brokenSymlink
+ ${0} | ${'a file that is not symlink-adjacent'} | ${false}
+ ${1} | ${'a file that replaces a symlink'} | ${makeBrokenSymlinkObject(false, false, false, false, true)}
+ ${2} | ${'a symlink that is replaced by a file'} | ${makeBrokenSymlinkObject(true, true, false, false, false)}
+ ${3} | ${'a symlink that replaces a file'} | ${makeBrokenSymlinkObject(false, false, true, false, false)}
+ ${4} | ${'a file that is replaced by a symlink'} | ${makeBrokenSymlinkObject(true, false, false, true, false)}
+ `(
+ 'properly marks $description with the correct .brokenSymlink value',
+ ({ fileIndex, brokenSymlink }) => {
+ const preppedRaw = prepareRawDiffFile({
+ file: DIFF_FILES[fileIndex],
+ allFiles: DIFF_FILES,
+ });
+
+ expect(preppedRaw.brokenSymlink).toStrictEqual(brokenSymlink);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index fc5e39357ca..5fef35d6c5b 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -1,6 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
import Cookies from 'js-cookie';
import mockDiffFile from 'jest/diffs/mock_data/diff_file';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import {
DIFF_VIEW_COOKIE_NAME,
INLINE_DIFF_VIEW_TYPE,
@@ -56,12 +58,10 @@ import testAction from '../../helpers/vuex_action_helper';
import * as utils from '~/diffs/store/utils';
import * as commonUtils from '~/lib/utils/common_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import { diffMetadata } from '../mock_data/diff_metadata';
-import createFlash from '~/flash';
-import { TEST_HOST } from 'jest/helpers/test_constants';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
-jest.mock('~/flash', () => jest.fn());
+jest.mock('~/flash');
describe('DiffsStoreActions', () => {
useLocalStorageSpy();
@@ -1594,24 +1594,39 @@ describe('DiffsStoreActions', () => {
describe('setCurrentDiffFileIdFromNote', () => {
it('commits UPDATE_CURRENT_DIFF_FILE_ID', () => {
const commit = jest.fn();
+ const state = { diffFiles: [{ file_hash: '123' }] };
const rootGetters = {
getDiscussion: () => ({ diff_file: { file_hash: '123' } }),
notesById: { '1': { discussion_id: '2' } },
};
- setCurrentDiffFileIdFromNote({ commit, rootGetters }, '1');
+ setCurrentDiffFileIdFromNote({ commit, state, rootGetters }, '1');
expect(commit).toHaveBeenCalledWith(types.UPDATE_CURRENT_DIFF_FILE_ID, '123');
});
it('does not commit UPDATE_CURRENT_DIFF_FILE_ID when discussion has no diff_file', () => {
const commit = jest.fn();
+ const state = { diffFiles: [{ file_hash: '123' }] };
const rootGetters = {
getDiscussion: () => ({ id: '1' }),
notesById: { '1': { discussion_id: '2' } },
};
- setCurrentDiffFileIdFromNote({ commit, rootGetters }, '1');
+ setCurrentDiffFileIdFromNote({ commit, state, rootGetters }, '1');
+
+ expect(commit).not.toHaveBeenCalled();
+ });
+
+ it('does not commit UPDATE_CURRENT_DIFF_FILE_ID when diff file does not exist', () => {
+ const commit = jest.fn();
+ const state = { diffFiles: [{ file_hash: '123' }] };
+ const rootGetters = {
+ getDiscussion: () => ({ diff_file: { file_hash: '124' } }),
+ notesById: { '1': { discussion_id: '2' } },
+ };
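+      // file_hash '124' is not present in state.diffFiles, so nothing should be committed.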
+
+ setCurrentDiffFileIdFromNote({ commit, state, rootGetters }, '1');
expect(commit).not.toHaveBeenCalled();
});
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index c24d406fef3..70047899612 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -830,6 +830,7 @@ describe('DiffsStoreMutations', () => {
const state = {
treeEntries: {},
tree: [],
+ isTreeLoaded: false,
};
mutations[types.SET_TREE_DATA](state, {
@@ -844,6 +845,7 @@ describe('DiffsStoreMutations', () => {
});
expect(state.tree).toEqual(['tree']);
+ expect(state.isTreeLoaded).toEqual(true);
});
});
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index d87619e1e3c..62c82468ea0 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -20,6 +20,14 @@ import { noteableDataMock } from '../../notes/mock_data';
const getDiffFileMock = () => JSON.parse(JSON.stringify(diffFileMockData));
const getDiffMetadataMock = () => JSON.parse(JSON.stringify(diffMetadata));
+function extractLinesFromFile(file) {
+ const unpackedParallel = file.parallel_diff_lines
+ .flatMap(({ left, right }) => [left, right])
+ .filter(Boolean);
+
+ return [...file.highlighted_diff_lines, ...unpackedParallel];
+}
+
describe('DiffsStoreUtils', () => {
describe('findDiffFile', () => {
const files = [{ file_hash: 1, name: 'one' }];
@@ -429,6 +437,28 @@ describe('DiffsStoreUtils', () => {
expect(preppedLine.right).toEqual(correctLine);
expect(preppedLine.line_code).toEqual(correctLine.line_code);
});
+
+ it.each`
+ brokenSymlink
+ ${false}
+ ${{}}
+ ${'anything except `false`'}
+ `(
+      "sets each line's `commentsDisabled` to the parent file's `brokenSymlink` value (`$brokenSymlink`)",
+ ({ brokenSymlink }) => {
+ preppedLine = utils.prepareLineForRenamedFile({
+ diffViewType: INLINE_DIFF_VIEW_TYPE,
+ line: sourceLine,
+ index: lineIndex,
+ diffFile: {
+ ...diffFile,
+ brokenSymlink,
+ },
+ });
+
+ expect(preppedLine.commentsDisabled).toStrictEqual(brokenSymlink);
+ },
+ );
});
describe('prepareDiffData', () => {
@@ -541,6 +571,25 @@ describe('DiffsStoreUtils', () => {
}),
]);
});
+
+ it('adds the `.brokenSymlink` property to each diff file', () => {
+ preparedDiff.diff_files.forEach(file => {
+ expect(file).toEqual(expect.objectContaining({ brokenSymlink: false }));
+ });
+ });
+
+ it("copies the diff file's `.brokenSymlink` value to each of that file's child lines", () => {
+ const lines = [
+ ...preparedDiff.diff_files,
+ ...splitInlineDiff.diff_files,
+ ...splitParallelDiff.diff_files,
+ ...completedDiff.diff_files,
+ ].flatMap(file => extractLinesFromFile(file));
+
+ lines.forEach(line => {
+ expect(line.commentsDisabled).toBe(false);
+ });
+ });
});
describe('for diff metadata', () => {
@@ -603,6 +652,12 @@ describe('DiffsStoreUtils', () => {
},
]);
});
+
+ it('adds the `.brokenSymlink` property to each meta diff file', () => {
+ preparedDiffFiles.forEach(file => {
+ expect(file).toMatchObject({ brokenSymlink: false });
+ });
+ });
});
});
diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js
index 688b9164e5f..4cfc6478bd2 100644
--- a/spec/frontend/dropzone_input_spec.js
+++ b/spec/frontend/dropzone_input_spec.js
@@ -1,9 +1,9 @@
import $ from 'jquery';
import mock from 'xhr-mock';
import { TEST_HOST } from 'spec/test_constants';
+import waitForPromises from 'helpers/wait_for_promises';
import dropzoneInput from '~/dropzone_input';
import PasteMarkdownTable from '~/behaviors/markdown/paste_markdown_table';
-import waitForPromises from 'helpers/wait_for_promises';
const TEST_FILE = new File([], 'somefile.jpg');
TEST_FILE.upload = {};
diff --git a/spec/frontend/editor/editor_lite_spec.js b/spec/frontend/editor/editor_lite_spec.js
index 92a136835bf..e4edeab172b 100644
--- a/spec/frontend/editor/editor_lite_spec.js
+++ b/spec/frontend/editor/editor_lite_spec.js
@@ -2,13 +2,15 @@ import { editor as monacoEditor, languages as monacoLanguages, Uri } from 'monac
import Editor from '~/editor/editor_lite';
import { DEFAULT_THEME, themes } from '~/ide/lib/themes';
+const URI_PREFIX = 'gitlab';
+
describe('Base editor', () => {
let editorEl;
let editor;
const blobContent = 'Foo Bar';
const blobPath = 'test.md';
- const uri = new Uri('gitlab', false, blobPath);
- const fakeModel = { foo: 'bar' };
+ const blobGlobalId = 'snippet_777';
+ const fakeModel = { foo: 'bar', dispose: jest.fn() };
beforeEach(() => {
setFixtures('<div id="editor" data-editor-loading></div>');
@@ -21,6 +23,8 @@ describe('Base editor', () => {
editorEl.remove();
});
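+  // Builds the same 'gitlab'-prefixed file URI the editor is expected to hand to Monaco's model factory.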
+ const createUri = (...paths) => Uri.file([URI_PREFIX, ...paths].join('/'));
+
it('initializes Editor with basic properties', () => {
expect(editor).toBeDefined();
expect(editor.editorEl).toBe(null);
@@ -65,7 +69,7 @@ describe('Base editor', () => {
it('creates model to be supplied to Monaco editor', () => {
editor.createInstance({ el: editorEl, blobPath, blobContent });
- expect(modelSpy).toHaveBeenCalledWith(blobContent, undefined, uri);
+ expect(modelSpy).toHaveBeenCalledWith(blobContent, undefined, createUri(blobPath));
expect(setModel).toHaveBeenCalledWith(fakeModel);
});
@@ -75,6 +79,16 @@ describe('Base editor', () => {
expect(editor.editorEl).not.toBe(null);
expect(instanceSpy).toHaveBeenCalledWith(editorEl, expect.anything());
});
+
+ it('with blobGlobalId, creates model with id in uri', () => {
+ editor.createInstance({ el: editorEl, blobPath, blobContent, blobGlobalId });
+
+ expect(modelSpy).toHaveBeenCalledWith(
+ blobContent,
+ undefined,
+ createUri(blobGlobalId, blobPath),
+ );
+ });
});
describe('implementation', () => {
@@ -82,10 +96,6 @@ describe('Base editor', () => {
editor.createInstance({ el: editorEl, blobPath, blobContent });
});
- afterEach(() => {
- editor.model.dispose();
- });
-
it('correctly proxies value from the model', () => {
expect(editor.getValue()).toEqual(blobContent);
});
@@ -132,10 +142,6 @@ describe('Base editor', () => {
editor.createInstance({ el: editorEl, blobPath, blobContent });
});
- afterEach(() => {
- editor.model.dispose();
- });
-
it('is extensible with the extensions', () => {
expect(editor.foo).toBeUndefined();
diff --git a/spec/frontend/editor/editor_markdown_ext_spec.js b/spec/frontend/editor/editor_markdown_ext_spec.js
index aad2400c0f0..b0fabad8542 100644
--- a/spec/frontend/editor/editor_markdown_ext_spec.js
+++ b/spec/frontend/editor/editor_markdown_ext_spec.js
@@ -1,5 +1,5 @@
-import EditorLite from '~/editor/editor_lite';
import { Range, Position } from 'monaco-editor';
+import EditorLite from '~/editor/editor_lite';
import EditorMarkdownExtension from '~/editor/editor_markdown_ext';
describe('Markdown Extension for Editor Lite', () => {
diff --git a/spec/frontend/emoji/emoji_spec.js b/spec/frontend/emoji/emoji_spec.js
index c6a15d5976a..9b49c8b8ab5 100644
--- a/spec/frontend/emoji/emoji_spec.js
+++ b/spec/frontend/emoji/emoji_spec.js
@@ -1,4 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
+import { trimText } from 'helpers/text_helper';
import axios from '~/lib/utils/axios_utils';
import { initEmojiMap, glEmojiTag, EMOJI_VERSION } from '~/emoji';
import isEmojiUnicodeSupported, {
@@ -9,7 +10,6 @@ import isEmojiUnicodeSupported, {
isHorceRacingSkinToneComboEmoji,
isPersonZwjEmoji,
} from '~/emoji/support/is_emoji_unicode_supported';
-import { trimText } from 'helpers/text_helper';
const emptySupportMap = {
personZwj: false,
diff --git a/spec/frontend/emoji/support/unicode_support_map_spec.js b/spec/frontend/emoji/support/unicode_support_map_spec.js
index aaee9c30cac..945e804a9fa 100644
--- a/spec/frontend/emoji/support/unicode_support_map_spec.js
+++ b/spec/frontend/emoji/support/unicode_support_map_spec.js
@@ -1,6 +1,6 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import getUnicodeSupportMap from '~/emoji/support/unicode_support_map';
import AccessorUtilities from '~/lib/utils/accessor';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
describe('Unicode Support Map', () => {
useLocalStorageSpy();
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index c9d77a34595..35ca323f5a9 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -50,18 +50,14 @@ class CustomEnvironment extends JSDOMEnvironment {
*/
this.global.fetch = () => {};
- // Not yet supported by JSDOM: https://github.com/jsdom/jsdom/issues/317
- this.global.document.createRange = () => ({
- setStart: () => {},
- setEnd: () => {},
- commonAncestorContainer: {
- nodeName: 'BODY',
- ownerDocument: this.global.document,
- },
- });
-
// Expose the jsdom (created in super class) to the global so that we can call reconfigure({ url: '' }) to properly set `window.location`
- this.global.dom = this.dom;
+ this.global.jsdom = this.dom;
+
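+  // No-op stubs for the User Timing APIs (mark/measure/getEntriesByName) used by instrumented code under test.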
+ Object.assign(this.global.performance, {
+ mark: () => null,
+ measure: () => null,
+ getEntriesByName: () => [],
+ });
}
async teardown() {
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
index 4c06e19cec0..e7f5ee4bc4d 100644
--- a/spec/frontend/environments/environment_actions_spec.js
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -1,9 +1,9 @@
import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
+import { GlLoadingIcon } from '@gitlab/ui';
import eventHub from '~/environments/event_hub';
import EnvironmentActions from '~/environments/components/environment_actions.vue';
import Icon from '~/vue_shared/components/icon.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
describe('EnvironmentActions Component', () => {
let vm;
diff --git a/spec/frontend/environments/environment_external_url_spec.js b/spec/frontend/environments/environment_external_url_spec.js
index 9997ea94941..4c133665979 100644
--- a/spec/frontend/environments/environment_external_url_spec.js
+++ b/spec/frontend/environments/environment_external_url_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import ExternalUrlComp from '~/environments/components/environment_external_url.vue';
describe('External URL Component', () => {
@@ -6,7 +6,7 @@ describe('External URL Component', () => {
const externalUrl = 'https://gitlab.com';
beforeEach(() => {
- wrapper = shallowMount(ExternalUrlComp, { propsData: { externalUrl } });
+ wrapper = mount(ExternalUrlComp, { propsData: { externalUrl } });
});
it('should link to the provided externalUrl prop', () => {
diff --git a/spec/frontend/environments/environment_stop_spec.js b/spec/frontend/environments/environment_stop_spec.js
index f971cf56b65..1865403cdc4 100644
--- a/spec/frontend/environments/environment_stop_spec.js
+++ b/spec/frontend/environments/environment_stop_spec.js
@@ -1,7 +1,7 @@
import $ from 'jquery';
import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
import StopComponent from '~/environments/components/environment_stop.vue';
-import LoadingButton from '~/vue_shared/components/loading_button.vue';
import eventHub from '~/environments/event_hub';
$.fn.tooltip = () => {};
@@ -17,7 +17,7 @@ describe('Stop Component', () => {
});
};
- const findButton = () => wrapper.find(LoadingButton);
+ const findButton = () => wrapper.find(GlButton);
beforeEach(() => {
jest.spyOn(window, 'confirm');
diff --git a/spec/frontend/environments/environments_app_spec.js b/spec/frontend/environments/environments_app_spec.js
index c0bf0dca176..d440bf73e15 100644
--- a/spec/frontend/environments/environments_app_spec.js
+++ b/spec/frontend/environments/environments_app_spec.js
@@ -1,6 +1,6 @@
import { mount, shallowMount } from '@vue/test-utils';
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import Container from '~/environments/components/container.vue';
import EmptyState from '~/environments/components/empty_state.vue';
import EnvironmentsApp from '~/environments/components/environments_app.vue';
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
index 740225ddd9d..f33c8de0094 100644
--- a/spec/frontend/environments/folder/environments_folder_view_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -1,11 +1,11 @@
import { mount } from '@vue/test-utils';
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
+import { removeBreakLine, removeWhitespace } from 'helpers/text_helper';
+import { GlPagination } from '@gitlab/ui';
+import axios from '~/lib/utils/axios_utils';
import EnvironmentsFolderViewComponent from '~/environments/folder/environments_folder_view.vue';
import EnvironmentTable from '~/environments/components/environments_table.vue';
import { environmentsList } from '../mock_data';
-import { removeBreakLine, removeWhitespace } from 'helpers/text_helper';
-import { GlPagination } from '@gitlab/ui';
describe('Environments Folder View', () => {
let mock;
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 6124602e038..ef3eeb8c7e4 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -1,7 +1,5 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
-import { __ } from '~/locale';
-import createFlash from '~/flash';
import {
GlButton,
GlLoadingIcon,
@@ -11,6 +9,8 @@ import {
GlAlert,
GlSprintf,
} from '@gitlab/ui';
+import { __ } from '~/locale';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import Stacktrace from '~/error_tracking/components/stacktrace.vue';
import ErrorDetails from '~/error_tracking/components/error_details.vue';
import {
diff --git a/spec/frontend/error_tracking/components/error_tracking_actions_spec.js b/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
index 1ea92883e54..b22805f5227 100644
--- a/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import ErrorTrackingActions from '~/error_tracking/components/error_tracking_actions.vue';
describe('Error Tracking Actions', () => {
@@ -20,7 +20,7 @@ describe('Error Tracking Actions', () => {
},
...props,
},
- stubs: { GlDeprecatedButton },
+ stubs: { GlButton },
});
}
@@ -34,7 +34,7 @@ describe('Error Tracking Actions', () => {
}
});
- const findButtons = () => wrapper.findAll(GlDeprecatedButton);
+ const findButtons = () => wrapper.findAll(GlButton);
describe('when error status is unresolved', () => {
it('renders the correct actions buttons to allow ignore and resolve', () => {
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index d88a412fb50..bad70a31599 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -1,6 +1,12 @@
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
-import { GlEmptyState, GlLoadingIcon, GlFormInput, GlPagination, GlDropdown } from '@gitlab/ui';
+import {
+ GlEmptyState,
+ GlLoadingIcon,
+ GlFormInput,
+ GlPagination,
+ GlDeprecatedDropdown,
+} from '@gitlab/ui';
import stubChildren from 'helpers/stub_children';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
import ErrorTrackingActions from '~/error_tracking/components/error_tracking_actions.vue';
@@ -18,19 +24,19 @@ describe('ErrorTrackingList', () => {
const findErrorListTable = () => wrapper.find('table');
const findErrorListRows = () => wrapper.findAll('tbody tr');
- const dropdownsArray = () => wrapper.findAll(GlDropdown);
+ const dropdownsArray = () => wrapper.findAll(GlDeprecatedDropdown);
const findRecentSearchesDropdown = () =>
dropdownsArray()
.at(0)
- .find(GlDropdown);
+ .find(GlDeprecatedDropdown);
const findStatusFilterDropdown = () =>
dropdownsArray()
.at(1)
- .find(GlDropdown);
+ .find(GlDeprecatedDropdown);
const findSortDropdown = () =>
dropdownsArray()
.at(2)
- .find(GlDropdown);
+ .find(GlDeprecatedDropdown);
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findPagination = () => wrapper.find(GlPagination);
const findErrorActions = () => wrapper.find(ErrorTrackingActions);
@@ -128,8 +134,8 @@ describe('ErrorTrackingList', () => {
mountComponent({
stubs: {
GlTable: false,
- GlDropdown: false,
- GlDropdownItem: false,
+ GlDeprecatedDropdown: false,
+ GlDeprecatedDropdownItem: false,
GlLink: false,
},
});
@@ -199,8 +205,8 @@ describe('ErrorTrackingList', () => {
mountComponent({
stubs: {
GlTable: false,
- GlDropdown: false,
- GlDropdownItem: false,
+ GlDeprecatedDropdown: false,
+ GlDeprecatedDropdownItem: false,
},
});
});
@@ -335,8 +341,8 @@ describe('ErrorTrackingList', () => {
beforeEach(() => {
mountComponent({
stubs: {
- GlDropdown: false,
- GlDropdownItem: false,
+ GlDeprecatedDropdown: false,
+ GlDeprecatedDropdownItem: false,
},
});
});
diff --git a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
index de746b8ac84..df7bff201f1 100644
--- a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
+++ b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { GlSprintf } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
import StackTraceEntry from '~/error_tracking/components/stacktrace_entry.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import Icon from '~/vue_shared/components/icon.vue';
-import { trimText } from 'helpers/text_helper';
describe('Stacktrace Entry', () => {
let wrapper;
diff --git a/spec/frontend/error_tracking/store/actions_spec.js b/spec/frontend/error_tracking/store/actions_spec.js
index e4a895902b3..43037473a61 100644
--- a/spec/frontend/error_tracking/store/actions_spec.js
+++ b/spec/frontend/error_tracking/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import * as actions from '~/error_tracking/store/actions';
import * as types from '~/error_tracking/store/mutation_types';
import { visitUrl } from '~/lib/utils/url_utility';
diff --git a/spec/frontend/error_tracking/store/details/actions_spec.js b/spec/frontend/error_tracking/store/details/actions_spec.js
index 6802300b0f5..58e77c46e02 100644
--- a/spec/frontend/error_tracking/store/details/actions_spec.js
+++ b/spec/frontend/error_tracking/store/details/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import * as actions from '~/error_tracking/store/details/actions';
import * as types from '~/error_tracking/store/details/mutation_types';
import Poll from '~/lib/utils/poll';
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 3cb740bf05d..7326472e1dd 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -1,8 +1,8 @@
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import * as actions from '~/error_tracking/store/list/actions';
import * as types from '~/error_tracking/store/list/mutation_types';
diff --git a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
index d924f895da8..023a3e26781 100644
--- a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
+++ b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
@@ -1,7 +1,7 @@
import { pick, clone } from 'lodash';
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import ProjectDropdown from '~/error_tracking_settings/components/project_dropdown.vue';
import { defaultProps, projectList, staleProject } from '../mock';
@@ -43,7 +43,7 @@ describe('error tracking settings project dropdown', () => {
describe('empty project list', () => {
it('renders the dropdown', () => {
expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
- expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
+ expect(wrapper.find(GlDeprecatedDropdown).exists()).toBeTruthy();
});
it('shows helper text', () => {
@@ -58,8 +58,8 @@ describe('error tracking settings project dropdown', () => {
});
it('does not contain any dropdown items', () => {
- expect(wrapper.find(GlDropdownItem).exists()).toBeFalsy();
- expect(wrapper.find(GlDropdown).props('text')).toBe('No projects available');
+ expect(wrapper.find(GlDeprecatedDropdownItem).exists()).toBeFalsy();
+ expect(wrapper.find(GlDeprecatedDropdown).props('text')).toBe('No projects available');
});
});
@@ -72,12 +72,12 @@ describe('error tracking settings project dropdown', () => {
it('renders the dropdown', () => {
expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
- expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
+ expect(wrapper.find(GlDeprecatedDropdown).exists()).toBeTruthy();
});
it('contains a number of dropdown items', () => {
- expect(wrapper.find(GlDropdownItem).exists()).toBeTruthy();
- expect(wrapper.findAll(GlDropdownItem).length).toBe(2);
+ expect(wrapper.find(GlDeprecatedDropdownItem).exists()).toBeTruthy();
+ expect(wrapper.findAll(GlDeprecatedDropdownItem).length).toBe(2);
});
});
diff --git a/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
index c0851096d8e..158f70f7d47 100644
--- a/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
+++ b/spec/frontend/filtered_search/components/recent_searches_dropdown_content_spec.js
@@ -57,7 +57,11 @@ describe('Recent Searches Dropdown Content', () => {
beforeEach(() => {
createComponent({
- items: ['foo', 'author:@root label:~foo bar'],
+ items: [
+ 'foo',
+ 'author:@root label:~foo bar',
+ [{ type: 'author_username', value: { data: 'toby', operator: '=' } }],
+ ],
isLocalStorageAvailable: true,
});
});
@@ -76,7 +80,7 @@ describe('Recent Searches Dropdown Content', () => {
});
it('renders a correct amount of dropdown items', () => {
- expect(findDropdownItems()).toHaveLength(2);
+ expect(findDropdownItems()).toHaveLength(2); // Ignore non-string recent item
});
it('expect second dropdown to have 2 tokens', () => {
diff --git a/spec/frontend/filtered_search/filtered_search_manager_spec.js b/spec/frontend/filtered_search/filtered_search_manager_spec.js
index 70e8b339d4b..53c726a6cea 100644
--- a/spec/frontend/filtered_search/filtered_search_manager_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_manager_spec.js
@@ -77,7 +77,7 @@ describe('Filtered Search Manager', () => {
jest.spyOn(FilteredSearchDropdownManager.prototype, 'setDropdown').mockImplementation();
});
- const initializeManager = () => {
+ const initializeManager = ({ useDefaultState } = {}) => {
jest.spyOn(FilteredSearchManager.prototype, 'loadSearchParamsFromURL').mockImplementation();
jest.spyOn(FilteredSearchManager.prototype, 'tokenChange').mockImplementation();
jest
@@ -88,7 +88,7 @@ describe('Filtered Search Manager', () => {
input = document.querySelector('.filtered-search');
tokensContainer = document.querySelector('.tokens-container');
- manager = new FilteredSearchManager({ page });
+ manager = new FilteredSearchManager({ page, useDefaultState });
manager.setup();
};
@@ -184,17 +184,27 @@ describe('Filtered Search Manager', () => {
});
describe('search', () => {
- const defaultParams = '?scope=all&utf8=%E2%9C%93&state=opened';
+ const defaultParams = '?scope=all&utf8=%E2%9C%93';
+ const defaultState = '&state=opened';
- beforeEach(() => {
+ it('should search with a single word', done => {
initializeManager();
+ input.value = 'searchTerm';
+
+ visitUrl.mockImplementation(url => {
+ expect(url).toEqual(`${defaultParams}&search=searchTerm`);
+ done();
+ });
+
+ manager.search();
});
- it('should search with a single word', done => {
+ it('sets default state', done => {
+ initializeManager({ useDefaultState: true });
input.value = 'searchTerm';
visitUrl.mockImplementation(url => {
- expect(url).toEqual(`${defaultParams}&search=searchTerm`);
+ expect(url).toEqual(`${defaultParams}${defaultState}&search=searchTerm`);
done();
});
@@ -202,6 +212,7 @@ describe('Filtered Search Manager', () => {
});
it('should search with multiple words', done => {
+ initializeManager();
input.value = 'awesome search terms';
visitUrl.mockImplementation(url => {
@@ -213,6 +224,7 @@ describe('Filtered Search Manager', () => {
});
it('should search with special characters', done => {
+ initializeManager();
input.value = '~!@#$%^&*()_+{}:<>,.?/';
visitUrl.mockImplementation(url => {
@@ -225,7 +237,29 @@ describe('Filtered Search Manager', () => {
manager.search();
});
+ it('should use replacement URL for condition', done => {
+ initializeManager();
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('milestone', '=', '13', true),
+ );
+
+ visitUrl.mockImplementation(url => {
+ expect(url).toEqual(`${defaultParams}&milestone_title=replaced`);
+ done();
+ });
+
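+    // A condition maps a token/value pair to a replacementUrl that is substituted into the search URL.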
+ manager.filteredSearchTokenKeys.conditions.push({
+ url: 'milestone_title=13',
+ replacementUrl: 'milestone_title=replaced',
+ tokenKey: 'milestone',
+ value: '13',
+ operator: '=',
+ });
+ manager.search();
+ });
+
it('removes duplicated tokens', done => {
+ initializeManager();
tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(`
${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug')}
${FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug')}
diff --git a/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js b/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js
index e59ee925cc7..6a00065c9fe 100644
--- a/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/frontend/filtered_search/filtered_search_visual_tokens_spec.js
@@ -280,8 +280,8 @@ describe('Filtered Search Visual Tokens', () => {
);
});
- it('contains fa-close icon', () => {
- expect(tokenElement.querySelector('.remove-token .fa-close')).toEqual(expect.anything());
+ it('contains close icon', () => {
+ expect(tokenElement.querySelector('.remove-token .close-icon')).toEqual(expect.anything());
});
});
});
diff --git a/spec/frontend/filtered_search/services/recent_searches_service_spec.js b/spec/frontend/filtered_search/services/recent_searches_service_spec.js
index a89d38b7a20..afeca54b949 100644
--- a/spec/frontend/filtered_search/services/recent_searches_service_spec.js
+++ b/spec/frontend/filtered_search/services/recent_searches_service_spec.js
@@ -1,7 +1,7 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import RecentSearchesService from '~/filtered_search/services/recent_searches_service';
import RecentSearchesServiceError from '~/filtered_search/services/recent_searches_service_error';
import AccessorUtilities from '~/lib/utils/accessor';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
useLocalStorageSpy();
diff --git a/spec/frontend/filtered_search/visual_token_value_spec.js b/spec/frontend/filtered_search/visual_token_value_spec.js
index 3a64b688c7a..e2855b29b70 100644
--- a/spec/frontend/filtered_search/visual_token_value_spec.js
+++ b/spec/frontend/filtered_search/visual_token_value_spec.js
@@ -1,10 +1,10 @@
import { escape } from 'lodash';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import VisualTokenValue from '~/filtered_search/visual_token_value';
import AjaxCache from '~/lib/utils/ajax_cache';
import UsersCache from '~/lib/utils/users_cache';
import DropdownUtils from '~/filtered_search//dropdown_utils';
import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Filtered Search Visual Tokens', () => {
const findElements = tokenElement => {
diff --git a/spec/frontend/fixtures/api_merge_requests.rb b/spec/frontend/fixtures/api_merge_requests.rb
new file mode 100644
index 00000000000..f3280e216ff
--- /dev/null
+++ b/spec/frontend/fixtures/api_merge_requests.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
+ include ApiHelpers
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin, name: 'root') }
+  let(:namespace) { create(:namespace, name: 'gitlab-test') }
+ let(:project) { create(:project, :repository, namespace: namespace, path: 'lorem-ipsum') }
+
+ before(:all) do
+ clean_frontend_fixtures('api/merge_requests')
+ end
+
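+  # The example description doubles as the path of the generated JSON fixture.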
+ it 'api/merge_requests/get.json' do
+ 4.times { |i| create(:merge_request, source_project: project, source_branch: "branch-#{i}") }
+
+ get api("/projects/#{project.id}/merge_requests", admin)
+
+ expect(response).to be_successful
+ end
+end
diff --git a/spec/frontend/fixtures/api_projects.rb b/spec/frontend/fixtures/api_projects.rb
new file mode 100644
index 00000000000..fa77ca1c0cf
--- /dev/null
+++ b/spec/frontend/fixtures/api_projects.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Projects, '(JavaScript fixtures)', type: :request do
+ include ApiHelpers
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin, name: 'root') }
+  let(:namespace) { create(:namespace, name: 'gitlab-test') }
+ let(:project) { create(:project, :repository, namespace: namespace, path: 'lorem-ipsum') }
+ let(:project_empty) { create(:project_empty_repo, namespace: namespace, path: 'lorem-ipsum-empty') }
+
+ before(:all) do
+ clean_frontend_fixtures('api/projects')
+ end
+
+ it 'api/projects/get.json' do
+ get api("/projects/#{project.id}", admin)
+
+ expect(response).to be_successful
+ end
+
+ it 'api/projects/get_empty.json' do
+ get api("/projects/#{project_empty.id}", admin)
+
+ expect(response).to be_successful
+ end
+
+ it 'api/projects/branches/get.json' do
+ get api("/projects/#{project.id}/repository/branches/#{project.default_branch}", admin)
+
+ expect(response).to be_successful
+ end
+end
diff --git a/spec/frontend/fixtures/freeze_period.rb b/spec/frontend/fixtures/freeze_period.rb
new file mode 100644
index 00000000000..7695dbc2e8f
--- /dev/null
+++ b/spec/frontend/fixtures/freeze_period.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Freeze Periods (JavaScript fixtures)' do
+ include JavaScriptFixturesHelpers
+ include Ci::PipelineSchedulesHelper
+
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:project) { create(:project, :repository, path: 'freeze-periods-project') }
+
+ before(:all) do
+ clean_frontend_fixtures('api/freeze-periods/')
+ end
+
+ after(:all) do
+ remove_repository(project)
+ end
+
+ describe API::FreezePeriods, '(JavaScript fixtures)', type: :request do
+ include ApiHelpers
+
+ it 'api/freeze-periods/freeze_periods.json' do
+ create(:ci_freeze_period, project: project, freeze_start: '5 4 * * *', freeze_end: '5 9 * 8 *', cron_timezone: 'America/New_York')
+ create(:ci_freeze_period, project: project, freeze_start: '0 12 * * 1-5', freeze_end: '0 1 5 * *', cron_timezone: 'Etc/UTC')
+ create(:ci_freeze_period, project: project, freeze_start: '0 12 * * 1-5', freeze_end: '0 16 * * 6', cron_timezone: 'Europe/Berlin')
+
+ get api("/projects/#{project.id}/freeze_periods", admin)
+
+ expect(response).to be_successful
+ end
+ end
+
+ describe Ci::PipelineSchedulesHelper, '(JavaScript fixtures)' do
+ let(:response) { timezone_data.to_json }
+
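+    # The fixture content comes from the `response` helper above, so the example needs no request.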
+ it 'api/freeze-periods/timezone_data.json' do
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/merge_requests.rb b/spec/frontend/fixtures/merge_requests.rb
index 7801eb27ce8..6f281b26e6d 100644
--- a/spec/frontend/fixtures/merge_requests.rb
+++ b/spec/frontend/fixtures/merge_requests.rb
@@ -38,6 +38,7 @@ RSpec.describe Projects::MergeRequestsController, '(JavaScript fixtures)', type:
sha: merge_request.diff_head_sha
)
end
+
let(:path) { "files/ruby/popen.rb" }
let(:position) do
build(:text_diff_position, :added,
diff --git a/spec/frontend/fixtures/metrics_dashboard.rb b/spec/frontend/fixtures/metrics_dashboard.rb
index 6ee730f5c3d..eef79825ae7 100644
--- a/spec/frontend/fixtures/metrics_dashboard.rb
+++ b/spec/frontend/fixtures/metrics_dashboard.rb
@@ -8,7 +8,7 @@ RSpec.describe MetricsDashboard, '(JavaScript fixtures)', type: :controller do
let_it_be(:user) { create(:user) }
let_it_be(:namespace) { create(:namespace, name: 'monitoring' )}
- let_it_be(:project) { project_with_dashboard_namespace('.gitlab/dashboards/test.yml', namespace: namespace) }
+ let_it_be(:project) { project_with_dashboard_namespace('.gitlab/dashboards/test.yml', nil, namespace: namespace) }
let_it_be(:environment) { create(:environment, id: 1, project: project) }
let_it_be(:params) { { environment: environment } }
diff --git a/spec/frontend/fixtures/projects_json.rb b/spec/frontend/fixtures/projects_json.rb
new file mode 100644
index 00000000000..c081d4f08dc
--- /dev/null
+++ b/spec/frontend/fixtures/projects_json.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects JSON endpoints (JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let(:admin) { create(:admin, name: 'root') }
+ let(:project) { create(:project, :repository) }
+
+ before(:all) do
+ clean_frontend_fixtures('projects_json/')
+ end
+
+ before do
+ project.add_maintainer(admin)
+ sign_in(admin)
+ end
+
+ describe Projects::FindFileController, '(JavaScript fixtures)', type: :controller do
+ it 'projects_json/files.json' do
+ get :list,
+ params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: project.default_branch
+ },
+ format: 'json'
+
+ expect(response).to be_successful
+ end
+ end
+
+ describe Projects::CommitController, '(JavaScript fixtures)', type: :controller do
+ it 'projects_json/pipelines_empty.json' do
+ get :pipelines,
+ params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ id: project.commit(project.default_branch).id,
+ format: 'json'
+ }
+
+ expect(response).to be_successful
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/test_report.rb b/spec/frontend/fixtures/test_report.rb
index 16496aa901b..3d09078ba68 100644
--- a/spec/frontend/fixtures/test_report.rb
+++ b/spec/frontend/fixtures/test_report.rb
@@ -15,7 +15,6 @@ RSpec.describe Projects::PipelinesController, "(JavaScript fixtures)", type: :co
before do
sign_in(user)
- stub_feature_flags(junit_pipeline_view: project)
end
it "pipelines/test_report.json" do
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index fa7c1904339..a37d57b03fd 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -1,4 +1,10 @@
-import flash, { createFlashEl, createAction, hideFlash, removeFlashClickListener } from '~/flash';
+import createFlash, {
+ deprecatedCreateFlash,
+ createFlashEl,
+ createAction,
+ hideFlash,
+ removeFlashClickListener,
+} from '~/flash';
describe('Flash', () => {
describe('createFlashEl', () => {
@@ -119,10 +125,10 @@ describe('Flash', () => {
});
});
- describe('createFlash', () => {
+ describe('deprecatedCreateFlash', () => {
describe('no flash-container', () => {
it('does not add to the DOM', () => {
- const flashEl = flash('testing');
+ const flashEl = deprecatedCreateFlash('testing');
expect(flashEl).toBeNull();
@@ -144,7 +150,7 @@ describe('Flash', () => {
});
it('adds flash element into container', () => {
- flash('test', 'alert', document, null, false, true);
+ deprecatedCreateFlash('test', 'alert', document, null, false, true);
expect(document.querySelector('.flash-alert')).not.toBeNull();
@@ -152,26 +158,26 @@ describe('Flash', () => {
});
it('adds flash into specified parent', () => {
- flash('test', 'alert', document.querySelector('.content-wrapper'));
+ deprecatedCreateFlash('test', 'alert', document.querySelector('.content-wrapper'));
expect(document.querySelector('.content-wrapper .flash-alert')).not.toBeNull();
});
it('adds container classes when inside content-wrapper', () => {
- flash('test');
+ deprecatedCreateFlash('test');
expect(document.querySelector('.flash-text').className).toBe('flash-text');
});
it('does not add container when outside of content-wrapper', () => {
document.querySelector('.content-wrapper').className = 'js-content-wrapper';
- flash('test');
+ deprecatedCreateFlash('test');
expect(document.querySelector('.flash-text').className.trim()).toContain('flash-text');
});
it('removes element after clicking', () => {
- flash('test', 'alert', document, null, false, true);
+ deprecatedCreateFlash('test', 'alert', document, null, false, true);
document.querySelector('.flash-alert .js-close-icon').click();
@@ -182,8 +188,111 @@ describe('Flash', () => {
describe('with actionConfig', () => {
it('adds action link', () => {
- flash('test', 'alert', document, {
+ deprecatedCreateFlash('test', 'alert', document, {
+ title: 'test',
+ });
+
+ expect(document.querySelector('.flash-action')).not.toBeNull();
+ });
+
+ it('calls actionConfig clickHandler on click', () => {
+ const actionConfig = {
title: 'test',
+ clickHandler: jest.fn(),
+ };
+
+ deprecatedCreateFlash('test', 'alert', document, actionConfig);
+
+ document.querySelector('.flash-action').click();
+
+ expect(actionConfig.clickHandler).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('createFlash', () => {
+ const message = 'test';
+ const type = 'alert';
+ const parent = document;
+ const fadeTransition = false;
+ const addBodyClass = true;
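+  // The positional arguments used by `deprecatedCreateFlash` above, expressed as the options object `createFlash` accepts.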
+ const defaultParams = {
+ message,
+ type,
+ parent,
+ actionConfig: null,
+ fadeTransition,
+ addBodyClass,
+ };
+
+ describe('no flash-container', () => {
+ it('does not add to the DOM', () => {
+ const flashEl = createFlash({ message });
+
+ expect(flashEl).toBeNull();
+
+ expect(document.querySelector('.flash-alert')).toBeNull();
+ });
+ });
+
+ describe('with flash-container', () => {
+ beforeEach(() => {
+ setFixtures(
+ '<div class="content-wrapper js-content-wrapper"><div class="flash-container"></div></div>',
+ );
+ });
+
+ afterEach(() => {
+ document.querySelector('.js-content-wrapper').remove();
+ });
+
+ it('adds flash element into container', () => {
+ createFlash({ ...defaultParams });
+
+ expect(document.querySelector('.flash-alert')).not.toBeNull();
+
+ expect(document.body.className).toContain('flash-shown');
+ });
+
+ it('adds flash into specified parent', () => {
+ createFlash({ ...defaultParams, parent: document.querySelector('.content-wrapper') });
+
+ expect(document.querySelector('.content-wrapper .flash-alert')).not.toBeNull();
+ expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
+ });
+
+ it('adds container classes when inside content-wrapper', () => {
+ createFlash(defaultParams);
+
+ expect(document.querySelector('.flash-text').className).toBe('flash-text');
+ expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
+ });
+
+ it('does not add container when outside of content-wrapper', () => {
+ document.querySelector('.content-wrapper').className = 'js-content-wrapper';
+ createFlash(defaultParams);
+
+ expect(document.querySelector('.flash-text').className.trim()).toContain('flash-text');
+ });
+
+ it('removes element after clicking', () => {
+ createFlash({ ...defaultParams });
+
+ document.querySelector('.flash-alert .js-close-icon').click();
+
+ expect(document.querySelector('.flash-alert')).toBeNull();
+
+ expect(document.body.className).not.toContain('flash-shown');
+ });
+
+ describe('with actionConfig', () => {
+ it('adds action link', () => {
+ createFlash({
+ ...defaultParams,
+ actionConfig: {
+ title: 'test',
+ },
});
expect(document.querySelector('.flash-action')).not.toBeNull();
@@ -195,7 +304,7 @@ describe('Flash', () => {
clickHandler: jest.fn(),
};
- flash('test', 'alert', document, actionConfig);
+ createFlash({ ...defaultParams, actionConfig });
document.querySelector('.flash-action').click();
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index 7c54a48aa41..b4f36b82385 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -1,6 +1,8 @@
import MockAdapter from 'axios-mock-adapter';
import Vue from 'vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import appComponent from '~/frequent_items/components/app.vue';
import eventHub from '~/frequent_items/event_hub';
@@ -8,8 +10,6 @@ import store from '~/frequent_items/store';
import { FREQUENT_ITEMS, HOUR_IN_MS } from '~/frequent_items/constants';
import { getTopFrequentItems } from '~/frequent_items/utils';
import { currentSession, mockFrequentProjects, mockSearchedProjects } from '../mock_data';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import waitForPromises from 'helpers/wait_for_promises';
useLocalStorageSpy();
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 1595f6c9fff..0e16b726c4b 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -12,17 +12,19 @@ exports[`grafana integration component default state to match the default snapsh
class="js-section-header h4"
>
- Grafana Authentication
+ Grafana authentication
</h3>
- <gl-deprecated-button-stub
+ <gl-button-stub
+ category="primary"
class="js-settings-toggle"
- size="md"
- variant="secondary"
+ icon=""
+ size="medium"
+ variant="default"
>
Expand
- </gl-deprecated-button-stub>
+ </gl-button-stub>
<p
class="js-section-sub-header"
@@ -90,14 +92,20 @@ exports[`grafana integration component default state to match the default snapsh
</p>
</gl-form-group-stub>
- <gl-deprecated-button-stub
- size="md"
- variant="success"
+ <div
+ class="gl-display-flex gl-justify-content-end"
>
+ <gl-button-stub
+ category="primary"
+ icon=""
+ size="medium"
+ variant="success"
+ >
+
+ Save Changes
- Save Changes
-
- </gl-deprecated-button-stub>
+ </gl-button-stub>
+ </div>
</form>
</div>
</section>
diff --git a/spec/frontend/grafana_integration/components/grafana_integration_spec.js b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
index 3df200a98e4..df88a336c09 100644
--- a/spec/frontend/grafana_integration/components/grafana_integration_spec.js
+++ b/spec/frontend/grafana_integration/components/grafana_integration_spec.js
@@ -1,11 +1,11 @@
import { mount, shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import GrafanaIntegration from '~/grafana_integration/components/grafana_integration.vue';
import { createStore } from '~/grafana_integration/store';
import axios from '~/lib/utils/axios_utils';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/flash');
@@ -44,14 +44,14 @@ describe('grafana integration component', () => {
it('renders header text', () => {
wrapper = shallowMount(GrafanaIntegration, { store });
- expect(wrapper.find('.js-section-header').text()).toBe('Grafana Authentication');
+ expect(wrapper.find('.js-section-header').text()).toBe('Grafana authentication');
});
describe('expand/collapse button', () => {
it('renders as an expand button by default', () => {
wrapper = shallowMount(GrafanaIntegration, { store });
- const button = wrapper.find(GlDeprecatedButton);
+ const button = wrapper.find(GlButton);
expect(button.text()).toBe('Expand');
});
@@ -77,8 +77,7 @@ describe('grafana integration component', () => {
});
describe('submit button', () => {
- const findSubmitButton = () =>
- wrapper.find('.settings-content form').find(GlDeprecatedButton);
+ const findSubmitButton = () => wrapper.find('.settings-content form').find(GlButton);
const endpointRequest = [
operationsSettingsEndpoint,
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 35eda21e047..5d34bc48ed5 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -2,8 +2,8 @@ import '~/flash';
import $ from 'jquery';
import Vue from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
import appComponent from '~/groups/components/app.vue';
import groupFolderComponent from '~/groups/components/group_folder.vue';
import groupItemComponent from '~/groups/components/group_item.vue';
diff --git a/spec/frontend/header_spec.js b/spec/frontend/header_spec.js
index 467d9678f69..59a8ca2ed23 100644
--- a/spec/frontend/header_spec.js
+++ b/spec/frontend/header_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
-import initTodoToggle, { initNavUserDropdownTracking } from '~/header';
import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
+import initTodoToggle, { initNavUserDropdownTracking } from '~/header';
describe('Header', () => {
describe('Todos notification', () => {
diff --git a/spec/frontend/helpers/backoff_helper.js b/spec/frontend/helpers/backoff_helper.js
new file mode 100644
index 00000000000..e5c0308d3fb
--- /dev/null
+++ b/spec/frontend/helpers/backoff_helper.js
@@ -0,0 +1,33 @@
+/**
+ * A mock implementation of the commonUtils `backOff` utility, used to
+ * exercise code paths that retry multiple times.
+ *
+ * Usage:
+ *
+ * ```
+ * import * as commonUtils from '~/lib/utils/common_utils';
+ * import { backoffMockImplementation } from '../../helpers/backoff_helper';
+ *
+ * beforeEach(() => {
+ * // ...
+ * jest.spyOn(commonUtils, 'backOff').mockImplementation(backoffMockImplementation);
+ * });
+ * ```
+ *
+ * @param {Function} callback
+ */
+export const backoffMockImplementation = callback => {
+ const q = new Promise((resolve, reject) => {
+ const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
+ const next = () => callback(next, stop);
+    // Queue the callback on a fake timer so it can be flushed synchronously below
+ setTimeout(() => {
+ callback(next, stop);
+ });
+ });
+  // Flush the pending fake timer so the callback chain runs immediately
+ jest.runOnlyPendingTimers();
+ return q;
+};
+
+export default { backoffMockImplementation };
diff --git a/spec/frontend/helpers/dom_events_helper.js b/spec/frontend/helpers/dom_events_helper.js
index b66c12daf4f..139e0813397 100644
--- a/spec/frontend/helpers/dom_events_helper.js
+++ b/spec/frontend/helpers/dom_events_helper.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line import/prefer-default-export
export const triggerDOMEvent = type => {
window.document.dispatchEvent(
new Event(type, {
@@ -6,5 +7,3 @@ export const triggerDOMEvent = type => {
}),
);
};
-
-export default () => {};
diff --git a/spec/frontend/helpers/dom_shims/index.js b/spec/frontend/helpers/dom_shims/index.js
index d18bb94c107..2ba5701fc77 100644
--- a/spec/frontend/helpers/dom_shims/index.js
+++ b/spec/frontend/helpers/dom_shims/index.js
@@ -4,7 +4,7 @@ import './element_scroll_to';
import './form_element';
import './get_client_rects';
import './inner_text';
-import './mutation_observer';
+import './range';
import './window_scroll_to';
import './scroll_by';
import './size_properties';
diff --git a/spec/frontend/helpers/dom_shims/mutation_observer.js b/spec/frontend/helpers/dom_shims/mutation_observer.js
deleted file mode 100644
index 68c494f19ea..00000000000
--- a/spec/frontend/helpers/dom_shims/mutation_observer.js
+++ /dev/null
@@ -1,7 +0,0 @@
-/* eslint-disable class-methods-use-this */
-class MutationObserverStub {
- disconnect() {}
- observe() {}
-}
-
-global.MutationObserver = MutationObserverStub;
diff --git a/spec/frontend/helpers/dom_shims/range.js b/spec/frontend/helpers/dom_shims/range.js
new file mode 100644
index 00000000000..4ffdf3280ad
--- /dev/null
+++ b/spec/frontend/helpers/dom_shims/range.js
@@ -0,0 +1,13 @@
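+// JSDOM does not perform layout, so these Range measurement stubs return zeroed rects.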
+if (window.Range.prototype.getBoundingClientRect) {
+ throw new Error('window.Range.prototype.getBoundingClientRect already exists. Remove this stub!');
+}
+window.Range.prototype.getBoundingClientRect = function getBoundingClientRect() {
+ return { x: 0, y: 0, width: 0, height: 0, top: 0, right: 0, bottom: 0, left: 0 };
+};
+
+if (window.Range.prototype.getClientRects) {
+ throw new Error('window.Range.prototype.getClientRects already exists. Remove this stub!');
+}
+window.Range.prototype.getClientRects = function getClientRects() {
+ return [this.getBoundingClientRect()];
+};
diff --git a/spec/frontend/helpers/filtered_search_spec_helper.js b/spec/frontend/helpers/filtered_search_spec_helper.js
index ceb7982bbc3..ecf10694a16 100644
--- a/spec/frontend/helpers/filtered_search_spec_helper.js
+++ b/spec/frontend/helpers/filtered_search_spec_helper.js
@@ -15,7 +15,7 @@ export default class FilteredSearchSpecHelper {
<div class="value-container">
<div class="value">${value}</div>
<div class="remove-token" role="button">
- <i class="fa fa-close"></i>
+ <svg class="s16 close-icon"></svg>
</div>
</div>
</div>
diff --git a/spec/frontend/helpers/init_vue_mr_page_helper.js b/spec/frontend/helpers/init_vue_mr_page_helper.js
index c1d608cc5a0..b9aed63d0f6 100644
--- a/spec/frontend/helpers/init_vue_mr_page_helper.js
+++ b/spec/frontend/helpers/init_vue_mr_page_helper.js
@@ -22,6 +22,7 @@ export default function initVueMRPage() {
mrDiscussionsEl.setAttribute('data-noteable-data', JSON.stringify(noteableDataMock));
mrDiscussionsEl.setAttribute('data-notes-data', JSON.stringify(notesDataMock));
mrDiscussionsEl.setAttribute('data-noteable-type', 'merge-request');
+ mrDiscussionsEl.setAttribute('data-is-locked', 'false');
mrTestEl.appendChild(mrDiscussionsEl);
const discussionCounterEl = document.createElement('div');
diff --git a/spec/frontend/helpers/monitor_helper_spec.js b/spec/frontend/helpers/monitor_helper_spec.js
index 083b6404125..219b05e312b 100644
--- a/spec/frontend/helpers/monitor_helper_spec.js
+++ b/spec/frontend/helpers/monitor_helper_spec.js
@@ -1,12 +1,38 @@
-import * as monitorHelper from '~/helpers/monitor_helper';
+import { getSeriesLabel, makeDataSeries } from '~/helpers/monitor_helper';
describe('monitor helper', () => {
const defaultConfig = { default: true, name: 'default name' };
const name = 'data name';
const series = [[1, 1], [2, 2], [3, 3]];
- const data = ({ metric = { default_name: name }, values = series } = {}) => [{ metric, values }];
+
+ describe('getSeriesLabel', () => {
+ const metricAttributes = { __name__: 'up', app: 'prometheus' };
+
+ it('gets a single attribute label', () => {
+ expect(getSeriesLabel('app', metricAttributes)).toBe('app: prometheus');
+ });
+
+ it('gets a templated label', () => {
+ expect(getSeriesLabel('{{__name__}}', metricAttributes)).toBe('up');
+ expect(getSeriesLabel('{{app}}', metricAttributes)).toBe('prometheus');
+ expect(getSeriesLabel('{{missing}}', metricAttributes)).toBe('{{missing}}');
+ });
+
+    it('gets a multi-attribute label', () => {
+ expect(getSeriesLabel(null, metricAttributes)).toBe('__name__: up, app: prometheus');
+ expect(getSeriesLabel('', metricAttributes)).toBe('__name__: up, app: prometheus');
+ });
+
+ it('gets a simple label', () => {
+ expect(getSeriesLabel('A label', {})).toBe('A label');
+ });
+ });
describe('makeDataSeries', () => {
+ const data = ({ metric = { default_name: name }, values = series } = {}) => [
+ { metric, values },
+ ];
+
const expectedDataSeries = [
{
...defaultConfig,
@@ -15,19 +41,17 @@ describe('monitor helper', () => {
];
it('converts query results to data series', () => {
- expect(monitorHelper.makeDataSeries(data({ metric: {} }), defaultConfig)).toEqual(
- expectedDataSeries,
- );
+ expect(makeDataSeries(data({ metric: {} }), defaultConfig)).toEqual(expectedDataSeries);
});
it('returns an empty array if no query results exist', () => {
- expect(monitorHelper.makeDataSeries([], defaultConfig)).toEqual([]);
+ expect(makeDataSeries([], defaultConfig)).toEqual([]);
});
it('handles multi-series query results', () => {
const expectedData = { ...expectedDataSeries[0], name: 'default name: data name' };
- expect(monitorHelper.makeDataSeries([...data(), ...data()], defaultConfig)).toEqual([
+ expect(makeDataSeries([...data(), ...data()], defaultConfig)).toEqual([
expectedData,
expectedData,
]);
@@ -39,10 +63,7 @@ describe('monitor helper', () => {
name: '{{cmd}}',
};
- const [result] = monitorHelper.makeDataSeries(
- [{ metric: { cmd: 'brpop' }, values: series }],
- config,
- );
+ const [result] = makeDataSeries([{ metric: { cmd: 'brpop' }, values: series }], config);
expect(result.name).toEqual('brpop');
});
@@ -53,7 +74,7 @@ describe('monitor helper', () => {
name: '',
};
- const [result] = monitorHelper.makeDataSeries(
+ const [result] = makeDataSeries(
[
{
metric: {
@@ -79,7 +100,7 @@ describe('monitor helper', () => {
name: 'backend: {{ backend }}',
};
- const [result] = monitorHelper.makeDataSeries(
+ const [result] = makeDataSeries(
[{ metric: { backend: 'HA Server' }, values: series }],
config,
);
@@ -90,10 +111,7 @@ describe('monitor helper', () => {
it('supports repeated template variables', () => {
const config = { ...defaultConfig, name: '{{cmd}}, {{cmd}}' };
- const [result] = monitorHelper.makeDataSeries(
- [{ metric: { cmd: 'brpop' }, values: series }],
- config,
- );
+ const [result] = makeDataSeries([{ metric: { cmd: 'brpop' }, values: series }], config);
expect(result.name).toEqual('brpop, brpop');
});
@@ -101,7 +119,7 @@ describe('monitor helper', () => {
it('supports hyphenated template variables', () => {
const config = { ...defaultConfig, name: 'expired - {{ test-attribute }}' };
- const [result] = monitorHelper.makeDataSeries(
+ const [result] = makeDataSeries(
[{ metric: { 'test-attribute': 'test-attribute-value' }, values: series }],
config,
);
@@ -115,7 +133,7 @@ describe('monitor helper', () => {
name: '{{job}}: {{cmd}}',
};
- const [result] = monitorHelper.makeDataSeries(
+ const [result] = makeDataSeries(
[{ metric: { cmd: 'brpop', job: 'redis' }, values: series }],
config,
);
@@ -129,7 +147,7 @@ describe('monitor helper', () => {
name: '{{cmd}}',
};
- const [firstSeries, secondSeries] = monitorHelper.makeDataSeries(
+ const [firstSeries, secondSeries] = makeDataSeries(
[
{ metric: { cmd: 'brpop' }, values: series },
{ metric: { cmd: 'zrangebyscore' }, values: series },
diff --git a/spec/frontend/ide/components/activity_bar_spec.js b/spec/frontend/ide/components/activity_bar_spec.js
index 8b3853d4535..762f3c5dad1 100644
--- a/spec/frontend/ide/components/activity_bar_spec.js
+++ b/spec/frontend/ide/components/activity_bar_spec.js
@@ -1,15 +1,17 @@
import Vue from 'vue';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import { leftSidebarViews } from '~/ide/constants';
import ActivityBar from '~/ide/components/activity_bar.vue';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
-import { resetStore } from '../helpers';
describe('IDE activity bar', () => {
const Component = Vue.extend(ActivityBar);
let vm;
+ let store;
beforeEach(() => {
+ store = createStore();
+
Vue.set(store.state.projects, 'abcproject', {
web_url: 'testing',
});
@@ -20,8 +22,6 @@ describe('IDE activity bar', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
describe('updateActivityBarView', () => {
diff --git a/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js b/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
index 16d0b354a30..dbb43e43c19 100644
--- a/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/empty_state_spec.js
@@ -1,13 +1,15 @@
import Vue from 'vue';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import emptyState from '~/ide/components/commit_sidebar/empty_state.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
-import { resetStore } from '../../helpers';
describe('IDE commit panel empty state', () => {
let vm;
+ let store;
beforeEach(() => {
+ store = createStore();
+
const Component = Vue.extend(emptyState);
Vue.set(store.state, 'noChangesStateSvgPath', 'no-changes');
@@ -19,8 +21,6 @@ describe('IDE commit panel empty state', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders no changes text when last commit message is empty', () => {
diff --git a/spec/frontend/ide/components/commit_sidebar/form_spec.js b/spec/frontend/ide/components/commit_sidebar/form_spec.js
index c62df4a3795..9245cefc183 100644
--- a/spec/frontend/ide/components/commit_sidebar/form_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/form_spec.js
@@ -1,19 +1,20 @@
import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { projectData } from 'jest/ide/mock_data';
-import store from '~/ide/stores';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createStore } from '~/ide/stores';
import CommitForm from '~/ide/components/commit_sidebar/form.vue';
import { leftSidebarViews } from '~/ide/constants';
-import { resetStore } from '../../helpers';
-import waitForPromises from 'helpers/wait_for_promises';
describe('IDE commit form', () => {
const Component = Vue.extend(CommitForm);
let vm;
+ let store;
const beginCommitButton = () => vm.$el.querySelector('[data-testid="begin-commit-button"]');
beforeEach(() => {
+ store = createStore();
store.state.changedFiles.push('test');
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
@@ -24,8 +25,6 @@ describe('IDE commit form', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('enables begin commit button when there are changes', () => {
diff --git a/spec/frontend/ide/components/commit_sidebar/list_collapsed_spec.js b/spec/frontend/ide/components/commit_sidebar/list_collapsed_spec.js
index 45372d18965..42e0a20bc7b 100644
--- a/spec/frontend/ide/components/commit_sidebar/list_collapsed_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/list_collapsed_spec.js
@@ -1,14 +1,17 @@
import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import listCollapsed from '~/ide/components/commit_sidebar/list_collapsed.vue';
import { file } from '../../helpers';
import { removeWhitespace } from '../../../helpers/text_helper';
describe('Multi-file editor commit sidebar list collapsed', () => {
let vm;
+ let store;
beforeEach(() => {
+ store = createStore();
+
const Component = Vue.extend(listCollapsed);
vm = createComponentWithStore(Component, store, {
diff --git a/spec/frontend/ide/components/commit_sidebar/list_spec.js b/spec/frontend/ide/components/commit_sidebar/list_spec.js
index 2b5664ffc4e..2107ff96e95 100644
--- a/spec/frontend/ide/components/commit_sidebar/list_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/list_spec.js
@@ -1,13 +1,16 @@
import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import commitSidebarList from '~/ide/components/commit_sidebar/list.vue';
-import { file, resetStore } from '../../helpers';
+import { file } from '../../helpers';
describe('Multi-file editor commit sidebar list', () => {
+ let store;
let vm;
beforeEach(() => {
+ store = createStore();
+
const Component = Vue.extend(commitSidebarList);
vm = createComponentWithStore(Component, store, {
@@ -26,8 +29,6 @@ describe('Multi-file editor commit sidebar list', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
describe('with a list of files', () => {
diff --git a/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js b/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
index ac80ba58056..bf61f4bbe77 100644
--- a/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/radio_group_spec.js
@@ -1,13 +1,15 @@
import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import { resetStore } from 'jest/ide/helpers';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import radioGroup from '~/ide/components/commit_sidebar/radio_group.vue';
describe('IDE commit sidebar radio group', () => {
let vm;
+ let store;
beforeEach(done => {
+ store = createStore();
+
const Component = Vue.extend(radioGroup);
store.state.commit.commitAction = '2';
@@ -25,8 +27,6 @@ describe('IDE commit sidebar radio group', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('uses label if present', () => {
diff --git a/spec/frontend/ide/components/commit_sidebar/success_message_spec.js b/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
index e1a432b81be..db13c90fbb9 100644
--- a/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/success_message_spec.js
@@ -1,13 +1,15 @@
import Vue from 'vue';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import successMessage from '~/ide/components/commit_sidebar/success_message.vue';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
-import { resetStore } from '../../helpers';
describe('IDE commit panel successful commit state', () => {
let vm;
+ let store;
beforeEach(() => {
+ store = createStore();
+
const Component = Vue.extend(successMessage);
vm = createComponentWithStore(Component, store, {
@@ -19,8 +21,6 @@ describe('IDE commit panel successful commit state', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders last commit message when it exists', done => {
diff --git a/spec/frontend/ide/components/file_row_extra_spec.js b/spec/frontend/ide/components/file_row_extra_spec.js
index e78bacadebb..4bd27d23f76 100644
--- a/spec/frontend/ide/components/file_row_extra_spec.js
+++ b/spec/frontend/ide/components/file_row_extra_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import FileRowExtra from '~/ide/components/file_row_extra.vue';
-import { file, resetStore } from '../helpers';
+import { file } from '../helpers';
describe('IDE extra file row component', () => {
let Component;
@@ -32,7 +32,6 @@ describe('IDE extra file row component', () => {
afterEach(() => {
vm.$destroy();
- resetStore(vm.$store);
stagedFilesCount = 0;
unstagedFilesCount = 0;
diff --git a/spec/frontend/ide/components/file_templates/bar_spec.js b/spec/frontend/ide/components/file_templates/bar_spec.js
index 21dbe18a223..5a33837fb14 100644
--- a/spec/frontend/ide/components/file_templates/bar_spec.js
+++ b/spec/frontend/ide/components/file_templates/bar_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import Bar from '~/ide/components/file_templates/bar.vue';
-import { resetStore, file } from '../../helpers';
+import { file } from '../../helpers';
describe('IDE file templates bar component', () => {
let Component;
@@ -26,7 +26,6 @@ describe('IDE file templates bar component', () => {
afterEach(() => {
vm.$destroy();
- resetStore(vm.$store);
});
describe('template type dropdown', () => {
diff --git a/spec/frontend/ide/components/ide_review_spec.js b/spec/frontend/ide/components/ide_review_spec.js
index b56957e1f6d..c9ac2ac423d 100644
--- a/spec/frontend/ide/components/ide_review_spec.js
+++ b/spec/frontend/ide/components/ide_review_spec.js
@@ -3,7 +3,7 @@ import IdeReview from '~/ide/components/ide_review.vue';
import { createStore } from '~/ide/stores';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { trimText } from '../../helpers/text_helper';
-import { resetStore, file } from '../helpers';
+import { file } from '../helpers';
import { projectData } from '../mock_data';
describe('IDE review mode', () => {
@@ -26,8 +26,6 @@ describe('IDE review mode', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders list of files', () => {
diff --git a/spec/frontend/ide/components/ide_side_bar_spec.js b/spec/frontend/ide/components/ide_side_bar_spec.js
index 65cad2e7eb0..67257b40879 100644
--- a/spec/frontend/ide/components/ide_side_bar_spec.js
+++ b/spec/frontend/ide/components/ide_side_bar_spec.js
@@ -1,15 +1,17 @@
import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import ideSidebar from '~/ide/components/ide_side_bar.vue';
import { leftSidebarViews } from '~/ide/constants';
-import { resetStore } from '../helpers';
import { projectData } from '../mock_data';
describe('IdeSidebar', () => {
let vm;
+ let store;
beforeEach(() => {
+ store = createStore();
+
const Component = Vue.extend(ideSidebar);
store.state.currentProjectId = 'abcproject';
@@ -20,8 +22,6 @@ describe('IdeSidebar', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders a sidebar', () => {
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index efc1d984dec..a7b07a9f0e2 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import ide from '~/ide/components/ide.vue';
-import { file, resetStore } from '../helpers';
+import { file } from '../helpers';
import { projectData } from '../mock_data';
import extendStore from '~/ide/stores/extend';
@@ -41,8 +41,6 @@ describe('ide component, empty repo', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders "New file" button in empty repo', done => {
@@ -63,8 +61,6 @@ describe('ide component, non-empty repo', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('shows error message when set', done => {
diff --git a/spec/frontend/ide/components/ide_tree_list_spec.js b/spec/frontend/ide/components/ide_tree_list_spec.js
index 30f11db3153..4593ef6049b 100644
--- a/spec/frontend/ide/components/ide_tree_list_spec.js
+++ b/spec/frontend/ide/components/ide_tree_list_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
import IdeTreeList from '~/ide/components/ide_tree_list.vue';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
-import { resetStore, file } from '../helpers';
+import { file } from '../helpers';
import { projectData } from '../mock_data';
describe('IDE tree list', () => {
@@ -10,6 +10,7 @@ describe('IDE tree list', () => {
const normalBranchTree = [file('fileName')];
const emptyBranchTree = [];
let vm;
+ let store;
const bootstrapWithTree = (tree = normalBranchTree) => {
store.state.currentProjectId = 'abcproject';
@@ -25,10 +26,12 @@ describe('IDE tree list', () => {
});
};
+ beforeEach(() => {
+ store = createStore();
+ });
+
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
describe('normal branch', () => {
diff --git a/spec/frontend/ide/components/ide_tree_spec.js b/spec/frontend/ide/components/ide_tree_spec.js
index 01f007f09c3..899daa0bf57 100644
--- a/spec/frontend/ide/components/ide_tree_spec.js
+++ b/spec/frontend/ide/components/ide_tree_spec.js
@@ -1,14 +1,17 @@
import Vue from 'vue';
import IdeTree from '~/ide/components/ide_tree.vue';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
-import { resetStore, file } from '../helpers';
+import { file } from '../helpers';
import { projectData } from '../mock_data';
describe('IdeRepoTree', () => {
+ let store;
let vm;
beforeEach(() => {
+ store = createStore();
+
const IdeRepoTree = Vue.extend(IdeTree);
store.state.currentProjectId = 'abcproject';
@@ -24,8 +27,6 @@ describe('IdeRepoTree', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders list of files', () => {
diff --git a/spec/frontend/ide/components/jobs/detail_spec.js b/spec/frontend/ide/components/jobs/detail_spec.js
index 8f3815d5aab..acd30dee718 100644
--- a/spec/frontend/ide/components/jobs/detail_spec.js
+++ b/spec/frontend/ide/components/jobs/detail_spec.js
@@ -1,9 +1,9 @@
import Vue from 'vue';
+import { TEST_HOST } from 'helpers/test_constants';
import JobDetail from '~/ide/components/jobs/detail.vue';
import { createStore } from '~/ide/stores';
import { createComponentWithStore } from '../../../helpers/vue_mount_component_helper';
import { jobs } from '../../mock_data';
-import { TEST_HOST } from 'helpers/test_constants';
describe('IDE jobs detail view', () => {
let vm;
diff --git a/spec/frontend/ide/components/new_dropdown/index_spec.js b/spec/frontend/ide/components/new_dropdown/index_spec.js
index 00781c16609..c6cebf36de3 100644
--- a/spec/frontend/ide/components/new_dropdown/index_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/index_spec.js
@@ -1,13 +1,15 @@
import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import store from '~/ide/stores';
+import { createStore } from '~/ide/stores';
import newDropdown from '~/ide/components/new_dropdown/index.vue';
-import { resetStore } from '../../helpers';
describe('new dropdown component', () => {
+ let store;
let vm;
beforeEach(() => {
+ store = createStore();
+
const component = Vue.extend(newDropdown);
vm = createComponentWithStore(component, store, {
@@ -30,8 +32,6 @@ describe('new dropdown component', () => {
afterEach(() => {
vm.$destroy();
-
- resetStore(vm.$store);
});
it('renders new file, upload and new directory links', () => {
diff --git a/spec/frontend/ide/components/new_dropdown/modal_spec.js b/spec/frontend/ide/components/new_dropdown/modal_spec.js
index da17cc3601e..ea8ba24c9d0 100644
--- a/spec/frontend/ide/components/new_dropdown/modal_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/modal_spec.js
@@ -2,7 +2,7 @@ import Vue from 'vue';
import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import { createStore } from '~/ide/stores';
import modal from '~/ide/components/new_dropdown/modal.vue';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
index e32abc98aae..bb9ba32a699 100644
--- a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
+++ b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
@@ -1,9 +1,9 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
import { createStore } from '~/ide/stores';
import paneModule from '~/ide/stores/modules/pane';
import CollapsibleSidebar from '~/ide/components/panes/collapsible_sidebar.vue';
import IdeSidebarNav from '~/ide/components/ide_sidebar_nav.vue';
-import Vuex from 'vuex';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js
index 795ded35d20..86cdbafaff9 100644
--- a/spec/frontend/ide/components/pipelines/list_spec.js
+++ b/spec/frontend/ide/components/pipelines/list_spec.js
@@ -2,11 +2,11 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { GlLoadingIcon } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
+import { pipelines } from 'jest/ide/mock_data';
import List from '~/ide/components/pipelines/list.vue';
import JobsList from '~/ide/components/jobs/list.vue';
import Tab from '~/vue_shared/components/tabs/tab.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
-import { pipelines } from 'jest/ide/mock_data';
import IDEServices from '~/ide/services';
const localVue = createLocalVue();
diff --git a/spec/frontend/ide/components/preview/navigator_spec.js b/spec/frontend/ide/components/preview/navigator_spec.js
index aa15f391e77..ba5ac3bbbea 100644
--- a/spec/frontend/ide/components/preview/navigator_spec.js
+++ b/spec/frontend/ide/components/preview/navigator_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import { GlLoadingIcon } from '@gitlab/ui';
-import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
import { listen } from 'codesandbox-api';
+import ClientsideNavigator from '~/ide/components/preview/navigator.vue';
jest.mock('codesandbox-api', () => ({
listen: jest.fn().mockReturnValue(jest.fn()),
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index a4336b8f2eb..f0ae2ba732b 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -3,6 +3,8 @@ import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import '~/behaviors/markdown/render_gfm';
import { Range } from 'monaco-editor';
+import waitForPromises from 'helpers/wait_for_promises';
+import waitUsingRealTimer from 'helpers/wait_using_real_timer';
import axios from '~/lib/utils/axios_utils';
import service from '~/ide/services';
import { createStoreOptions } from '~/ide/stores';
@@ -15,10 +17,8 @@ import {
viewerTypes,
} from '~/ide/constants';
import { createComponentWithStore } from '../../helpers/vue_mount_component_helper';
-import waitForPromises from 'helpers/wait_for_promises';
import { file } from '../helpers';
import { exampleConfigs, exampleFiles } from '../lib/editorconfig/mock_data';
-import waitUsingRealTimer from 'helpers/wait_using_real_timer';
describe('RepoEditor', () => {
let vm;
diff --git a/spec/frontend/ide/helpers.js b/spec/frontend/ide/helpers.js
index a9620d26313..8caa9c2b437 100644
--- a/spec/frontend/ide/helpers.js
+++ b/spec/frontend/ide/helpers.js
@@ -1,25 +1,5 @@
import * as pathUtils from 'path';
import { decorateData } from '~/ide/stores/utils';
-import state from '~/ide/stores/state';
-import commitState from '~/ide/stores/modules/commit/state';
-import mergeRequestsState from '~/ide/stores/modules/merge_requests/state';
-import pipelinesState from '~/ide/stores/modules/pipelines/state';
-import branchesState from '~/ide/stores/modules/branches/state';
-import fileTemplatesState from '~/ide/stores/modules/file_templates/state';
-import paneState from '~/ide/stores/modules/pane/state';
-
-export const resetStore = store => {
- const newState = {
- ...state(),
- commit: commitState(),
- mergeRequests: mergeRequestsState(),
- pipelines: pipelinesState(),
- branches: branchesState(),
- fileTemplates: fileTemplatesState(),
- rightPane: paneState(),
- };
- store.replaceState(newState);
-};
export const file = (name = 'name', id = name, type = '', parent = null) =>
decorateData({
diff --git a/spec/frontend/ide/ide_router_spec.js b/spec/frontend/ide/ide_router_spec.js
index b53e2019819..a4fe00883cf 100644
--- a/spec/frontend/ide/ide_router_spec.js
+++ b/spec/frontend/ide/ide_router_spec.js
@@ -1,6 +1,6 @@
+import waitForPromises from 'helpers/wait_for_promises';
import { createRouter } from '~/ide/ide_router';
import { createStore } from '~/ide/stores';
-import waitForPromises from 'helpers/wait_for_promises';
describe('IDE router', () => {
const PROJECT_NAMESPACE = 'my-group/sub-group';
diff --git a/spec/frontend/ide/lib/decorations/controller_spec.js b/spec/frontend/ide/lib/decorations/controller_spec.js
index 4556fc9d646..e9b7faaadfe 100644
--- a/spec/frontend/ide/lib/decorations/controller_spec.js
+++ b/spec/frontend/ide/lib/decorations/controller_spec.js
@@ -2,14 +2,17 @@ import Editor from '~/ide/lib/editor';
import DecorationsController from '~/ide/lib/decorations/controller';
import Model from '~/ide/lib/common/model';
import { file } from '../../helpers';
+import { createStore } from '~/ide/stores';
describe('Multi-file editor library decorations controller', () => {
let editorInstance;
let controller;
let model;
+ let store;
beforeEach(() => {
- editorInstance = Editor.create();
+ store = createStore();
+ editorInstance = Editor.create(store);
editorInstance.createInstance(document.createElement('div'));
controller = new DecorationsController(editorInstance);
diff --git a/spec/frontend/ide/lib/diff/controller_spec.js b/spec/frontend/ide/lib/diff/controller_spec.js
index 0b33a4c6ad6..8ee6388a760 100644
--- a/spec/frontend/ide/lib/diff/controller_spec.js
+++ b/spec/frontend/ide/lib/diff/controller_spec.js
@@ -4,6 +4,7 @@ import ModelManager from '~/ide/lib/common/model_manager';
import DecorationsController from '~/ide/lib/decorations/controller';
import DirtyDiffController, { getDiffChangeType, getDecorator } from '~/ide/lib/diff/controller';
import { computeDiff } from '~/ide/lib/diff/diff';
+import { createStore } from '~/ide/stores';
import { file } from '../../helpers';
describe('Multi-file editor library dirty diff controller', () => {
@@ -12,9 +13,12 @@ describe('Multi-file editor library dirty diff controller', () => {
let modelManager;
let decorationsController;
let model;
+ let store;
beforeEach(() => {
- editorInstance = Editor.create();
+ store = createStore();
+
+ editorInstance = Editor.create(store);
editorInstance.createInstance(document.createElement('div'));
modelManager = new ModelManager();
diff --git a/spec/frontend/ide/lib/editor_spec.js b/spec/frontend/ide/lib/editor_spec.js
index 5f28309422d..529f80e6f6f 100644
--- a/spec/frontend/ide/lib/editor_spec.js
+++ b/spec/frontend/ide/lib/editor_spec.js
@@ -5,6 +5,7 @@ import {
Selection,
} from 'monaco-editor';
import Editor from '~/ide/lib/editor';
+import { createStore } from '~/ide/stores';
import { defaultEditorOptions } from '~/ide/lib/editor_options';
import { file } from '../helpers';
@@ -12,6 +13,7 @@ describe('Multi-file editor library', () => {
let instance;
let el;
let holder;
+ let store;
const setNodeOffsetWidth = val => {
Object.defineProperty(instance.instance.getDomNode(), 'offsetWidth', {
@@ -22,13 +24,14 @@ describe('Multi-file editor library', () => {
};
beforeEach(() => {
+ store = createStore();
el = document.createElement('div');
holder = document.createElement('div');
el.appendChild(holder);
document.body.appendChild(el);
- instance = Editor.create();
+ instance = Editor.create(store);
});
afterEach(() => {
@@ -44,7 +47,7 @@ describe('Multi-file editor library', () => {
});
it('creates instance returns cached instance', () => {
- expect(Editor.create()).toEqual(instance);
+ expect(Editor.create(store)).toEqual(instance);
});
describe('createInstance', () => {
diff --git a/spec/frontend/ide/lib/languages/vue_spec.js b/spec/frontend/ide/lib/languages/vue_spec.js
index 3d8784c1436..ba5c31bb101 100644
--- a/spec/frontend/ide/lib/languages/vue_spec.js
+++ b/spec/frontend/ide/lib/languages/vue_spec.js
@@ -9,7 +9,7 @@ describe('tokenization for .vue files', () => {
registerLanguages(vue);
});
- test.each([
+ it.each([
[
'<div v-if="something">content</div>',
[
diff --git a/spec/frontend/ide/stores/actions/merge_request_spec.js b/spec/frontend/ide/stores/actions/merge_request_spec.js
index e5c4f346459..62971b9cad6 100644
--- a/spec/frontend/ide/stores/actions/merge_request_spec.js
+++ b/spec/frontend/ide/stores/actions/merge_request_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import store from '~/ide/stores';
-import createFlash from '~/flash';
+import { createStore } from '~/ide/stores';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import {
getMergeRequestData,
getMergeRequestChanges,
@@ -10,7 +10,6 @@ import {
} from '~/ide/stores/actions/merge_request';
import service from '~/ide/services';
import { leftSidebarViews, PERMISSION_READ_MR } from '~/ide/constants';
-import { resetStore } from '../../helpers';
const TEST_PROJECT = 'abcproject';
const TEST_PROJECT_ID = 17;
@@ -18,9 +17,12 @@ const TEST_PROJECT_ID = 17;
jest.mock('~/flash');
describe('IDE store merge request actions', () => {
+ let store;
let mock;
beforeEach(() => {
+ store = createStore();
+
mock = new MockAdapter(axios);
store.state.projects[TEST_PROJECT] = {
@@ -34,7 +36,6 @@ describe('IDE store merge request actions', () => {
afterEach(() => {
mock.restore();
- resetStore(store);
});
describe('getMergeRequestsForBranch', () => {
diff --git a/spec/frontend/ide/stores/actions/project_spec.js b/spec/frontend/ide/stores/actions/project_spec.js
index 64024c12903..ca3687307a9 100644
--- a/spec/frontend/ide/stores/actions/project_spec.js
+++ b/spec/frontend/ide/stores/actions/project_spec.js
@@ -1,4 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import axios from '~/lib/utils/axios_utils';
import { createStore } from '~/ide/stores';
import {
@@ -12,8 +14,6 @@ import {
} from '~/ide/stores/actions';
import service from '~/ide/services';
import api from '~/api';
-import testAction from 'helpers/vuex_action_helper';
-import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
const TEST_PROJECT_ID = 'abc/def';
diff --git a/spec/frontend/ide/stores/actions/tree_spec.js b/spec/frontend/ide/stores/actions/tree_spec.js
index c20941843c4..0eabd982d57 100644
--- a/spec/frontend/ide/stores/actions/tree_spec.js
+++ b/spec/frontend/ide/stores/actions/tree_spec.js
@@ -1,5 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import { showTreeEntry, getFiles, setDirectoryData } from '~/ide/stores/actions/tree';
import * as types from '~/ide/stores/mutation_types';
import axios from '~/lib/utils/axios_utils';
@@ -7,7 +8,6 @@ import { createStore } from '~/ide/stores';
import service from '~/ide/services';
import { createRouter } from '~/ide/ide_router';
import { file, createEntriesFromPaths } from '../../helpers';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Multi-file store tree actions', () => {
let projectTree;
diff --git a/spec/frontend/ide/stores/modules/file_templates/getters_spec.js b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
index 5855496a330..c9676b23fa1 100644
--- a/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/file_templates/getters_spec.js
@@ -5,7 +5,7 @@ import * as getters from '~/ide/stores/modules/file_templates/getters';
describe('IDE file templates getters', () => {
describe('templateTypes', () => {
it('returns list of template types', () => {
- expect(getters.templateTypes().length).toBe(4);
+ expect(getters.templateTypes().length).toBe(5);
});
});
diff --git a/spec/frontend/ide/stores/modules/router/actions_spec.js b/spec/frontend/ide/stores/modules/router/actions_spec.js
index 4795eae2b79..1458a43da57 100644
--- a/spec/frontend/ide/stores/modules/router/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/router/actions_spec.js
@@ -1,6 +1,6 @@
+import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/router/actions';
import * as types from '~/ide/stores/modules/router/mutation_types';
-import testAction from 'helpers/vuex_action_helper';
const TEST_PATH = 'test/path/abc';
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
index 4bc937b4784..d0ac2af3ffd 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
@@ -6,7 +6,7 @@ import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
import * as actions from '~/ide/stores/modules/terminal/actions/session_controls';
import httpStatus from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
index 7909f828124..e25746e1dd1 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_status_spec.js
@@ -5,7 +5,7 @@ import * as messages from '~/ide/stores/modules/terminal/messages';
import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
import * as actions from '~/ide/stores/modules/terminal/actions/session_status';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/ide/stores/modules/terminal/messages_spec.js b/spec/frontend/ide/stores/modules/terminal/messages_spec.js
index 966158999da..1bb92a9dfa5 100644
--- a/spec/frontend/ide/stores/modules/terminal/messages_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/messages_spec.js
@@ -15,6 +15,8 @@ describe('IDE store terminal messages', () => {
sprintf(
messages.ERROR_CONFIG,
{
+ codeStart: `<code>`,
+ codeEnd: `</code>`,
helpStart: `<a href="${escape(TEST_HELP_URL)}" target="_blank">`,
helpEnd: '</a>',
},
diff --git a/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js b/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js
index ac976300ed0..3fa57bde415 100644
--- a/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal_sync/actions_spec.js
@@ -1,7 +1,7 @@
+import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/ide/stores/modules/terminal_sync/actions';
import mirror, { canConnect, SERVICE_NAME } from '~/ide/lib/mirror';
import * as types from '~/ide/stores/modules/terminal_sync/mutation_types';
-import testAction from 'helpers/vuex_action_helper';
jest.mock('~/ide/lib/mirror');
diff --git a/spec/frontend/ide/sync_router_and_store_spec.js b/spec/frontend/ide/sync_router_and_store_spec.js
index c4ce92b99cc..ccf6e200806 100644
--- a/spec/frontend/ide/sync_router_and_store_spec.js
+++ b/spec/frontend/ide/sync_router_and_store_spec.js
@@ -1,7 +1,7 @@
import VueRouter from 'vue-router';
+import waitForPromises from 'helpers/wait_for_promises';
import { createStore } from '~/ide/stores';
import { syncRouterAndStore } from '~/ide/sync_router_and_store';
-import waitForPromises from 'helpers/wait_for_promises';
const TEST_ROUTE = '/test/lorem/ipsum';
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index b6de576a0a4..e7ef0de45a0 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -1,3 +1,4 @@
+import { languages } from 'monaco-editor';
import {
isTextFile,
registerLanguages,
@@ -9,7 +10,6 @@ import {
getPathParent,
readFileAsDataURL,
} from '~/ide/utils';
-import { languages } from 'monaco-editor';
describe('WebIDE utils', () => {
describe('isTextFile', () => {
diff --git a/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js b/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js
index 2deb4be2b91..98c05d648b8 100644
--- a/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js
+++ b/spec/frontend/image_diff/helpers/comment_indicator_helper_spec.js
@@ -1,6 +1,6 @@
+import { TEST_HOST } from 'jest/helpers/test_constants';
import * as commentIndicatorHelper from '~/image_diff/helpers/comment_indicator_helper';
import * as mockData from '../mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('commentIndicatorHelper', () => {
const { coordinate } = mockData;
diff --git a/spec/frontend/image_diff/helpers/utils_helper_spec.js b/spec/frontend/image_diff/helpers/utils_helper_spec.js
index a47c681e775..7f2376826c2 100644
--- a/spec/frontend/image_diff/helpers/utils_helper_spec.js
+++ b/spec/frontend/image_diff/helpers/utils_helper_spec.js
@@ -1,7 +1,7 @@
+import { TEST_HOST } from 'jest/helpers/test_constants';
import * as utilsHelper from '~/image_diff/helpers/utils_helper';
import ImageBadge from '~/image_diff/image_badge';
import * as mockData from '../mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('utilsHelper', () => {
const { noteId, discussionId, image, imageProperties, imageMeta } = mockData;
diff --git a/spec/frontend/image_diff/image_diff_spec.js b/spec/frontend/image_diff/image_diff_spec.js
index 2b29a522193..d89e4312344 100644
--- a/spec/frontend/image_diff/image_diff_spec.js
+++ b/spec/frontend/image_diff/image_diff_spec.js
@@ -1,8 +1,8 @@
+import { TEST_HOST } from 'jest/helpers/test_constants';
import ImageDiff from '~/image_diff/image_diff';
import * as imageUtility from '~/lib/utils/image_utility';
import imageDiffHelper from '~/image_diff/helpers/index';
import * as mockData from './mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('ImageDiff', () => {
let element;
diff --git a/spec/frontend/image_diff/replaced_image_diff_spec.js b/spec/frontend/image_diff/replaced_image_diff_spec.js
index 38a43bfa858..10827d76e55 100644
--- a/spec/frontend/image_diff/replaced_image_diff_spec.js
+++ b/spec/frontend/image_diff/replaced_image_diff_spec.js
@@ -1,8 +1,8 @@
+import { TEST_HOST } from 'jest/helpers/test_constants';
import ReplacedImageDiff from '~/image_diff/replaced_image_diff';
import ImageDiff from '~/image_diff/image_diff';
import { viewTypes } from '~/image_diff/view_types';
import imageDiffHelper from '~/image_diff/helpers/index';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('ReplacedImageDiff', () => {
let element;
diff --git a/spec/frontend/import_projects/components/import_projects_table_spec.js b/spec/frontend/import_projects/components/import_projects_table_spec.js
index 419d67e239f..b217242968a 100644
--- a/spec/frontend/import_projects/components/import_projects_table_spec.js
+++ b/spec/frontend/import_projects/components/import_projects_table_spec.js
@@ -2,16 +2,14 @@ import { nextTick } from 'vue';
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlButton } from '@gitlab/ui';
-import { state, getters } from '~/import_projects/store';
-import eventHub from '~/import_projects/event_hub';
+import state from '~/import_projects/store/state';
+import * as getters from '~/import_projects/store/getters';
+import { STATUSES } from '~/import_projects/constants';
import ImportProjectsTable from '~/import_projects/components/import_projects_table.vue';
import ImportedProjectTableRow from '~/import_projects/components/imported_project_table_row.vue';
import ProviderRepoTableRow from '~/import_projects/components/provider_repo_table_row.vue';
import IncompatibleRepoTableRow from '~/import_projects/components/incompatible_repo_table_row.vue';
-
-jest.mock('~/import_projects/event_hub', () => ({
- $emit: jest.fn(),
-}));
+import PageQueryParamSync from '~/import_projects/components/page_query_param_sync.vue';
describe('ImportProjectsTable', () => {
let wrapper;
@@ -21,13 +19,6 @@ describe('ImportProjectsTable', () => {
const providerTitle = 'THE PROVIDER';
const providerRepo = { id: 10, sanitizedName: 'sanitizedName', fullName: 'fullName' };
- const importedProject = {
- id: 1,
- fullPath: 'fullPath',
- importStatus: 'started',
- providerLink: 'providerLink',
- importSource: 'importSource',
- };
const findImportAllButton = () =>
wrapper
@@ -35,11 +26,15 @@ describe('ImportProjectsTable', () => {
.filter(w => w.props().variant === 'success')
.at(0);
+ const importAllFn = jest.fn();
+ const setPageFn = jest.fn();
+
function createComponent({
state: initialState,
getters: customGetters,
slots,
filterable,
+ paginatable,
} = {}) {
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -52,11 +47,13 @@ describe('ImportProjectsTable', () => {
},
actions: {
fetchRepos: jest.fn(),
- fetchReposFiltered: jest.fn(),
fetchJobs: jest.fn(),
+ fetchNamespaces: jest.fn(),
+ importAll: importAllFn,
stopJobsPolling: jest.fn(),
clearJobsEtagPoll: jest.fn(),
setFilter: jest.fn(),
+ setPage: setPageFn,
},
});
@@ -66,6 +63,7 @@ describe('ImportProjectsTable', () => {
propsData: {
providerTitle,
filterable,
+ paginatable,
},
slots,
});
@@ -79,11 +77,13 @@ describe('ImportProjectsTable', () => {
});
it('renders a loading icon while repos are loading', () => {
- createComponent({
- state: {
- isLoadingRepos: true,
- },
- });
+ createComponent({ state: { isLoadingRepos: true } });
+
+ expect(wrapper.contains(GlLoadingIcon)).toBe(true);
+ });
+
+ it('renders a loading icon while namespaces are loading', () => {
+ createComponent({ state: { isLoadingNamespaces: true } });
expect(wrapper.contains(GlLoadingIcon)).toBe(true);
});
@@ -91,10 +91,16 @@ describe('ImportProjectsTable', () => {
it('renders a table with imported projects and provider repos', () => {
createComponent({
state: {
- importedProjects: [importedProject],
- providerRepos: [providerRepo],
- incompatibleRepos: [{ ...providerRepo, id: 11 }],
- namespaces: [{ path: 'path' }],
+ namespaces: [{ fullPath: 'path' }],
+ repositories: [
+ { importSource: { id: 1 }, importedProject: null, importStatus: STATUSES.NONE },
+ { importSource: { id: 2 }, importedProject: {}, importStatus: STATUSES.FINISHED },
+ {
+ importSource: { id: 3, incompatible: true },
+ importedProject: {},
+ importStatus: STATUSES.NONE,
+ },
+ ],
},
});
@@ -133,13 +139,7 @@ describe('ImportProjectsTable', () => {
);
it('renders an empty state if there are no projects available', () => {
- createComponent({
- state: {
- importedProjects: [],
- providerRepos: [],
- incompatibleProjects: [],
- },
- });
+ createComponent({ state: { repositories: [] } });
expect(wrapper.contains(ProviderRepoTableRow)).toBe(false);
expect(wrapper.contains(ImportedProjectTableRow)).toBe(false);
@@ -147,37 +147,63 @@ describe('ImportProjectsTable', () => {
});
it('sends importAll event when import button is clicked', async () => {
- createComponent({
- state: {
- providerRepos: [providerRepo],
- },
- });
+ createComponent({ state: { providerRepos: [providerRepo] } });
findImportAllButton().vm.$emit('click');
await nextTick();
- expect(eventHub.$emit).toHaveBeenCalledWith('importAll');
+
+ expect(importAllFn).toHaveBeenCalled();
});
it('shows loading spinner when import is in progress', () => {
- createComponent({
- getters: {
- isImportingAnyRepo: () => true,
- },
- });
+ createComponent({ getters: { isImportingAnyRepo: () => true } });
expect(findImportAllButton().props().loading).toBe(true);
});
it('renders filtering input field by default', () => {
createComponent();
+
expect(findFilterField().exists()).toBe(true);
});
it('does not render filtering input field when filterable is false', () => {
createComponent({ filterable: false });
+
expect(findFilterField().exists()).toBe(false);
});
+ describe('when paginatable is set to true', () => {
+ const pageInfo = { page: 1 };
+
+ beforeEach(() => {
+ createComponent({
+ state: {
+ namespaces: [{ fullPath: 'path' }],
+ pageInfo,
+ repositories: [
+ { importSource: { id: 1 }, importedProject: null, importStatus: STATUSES.NONE },
+ ],
+ },
+ paginatable: true,
+ });
+ });
+
+ it('passes current page to page-query-param-sync component', () => {
+ expect(wrapper.find(PageQueryParamSync).props().page).toBe(pageInfo.page);
+ });
+
+ it('dispatches setPage when page-query-param-sync emits popstate', () => {
+ const NEW_PAGE = 2;
+ wrapper.find(PageQueryParamSync).vm.$emit('popstate', NEW_PAGE);
+
+ const { calls } = setPageFn.mock;
+
+ expect(calls).toHaveLength(1);
+ expect(calls[0][1]).toBe(NEW_PAGE);
+ });
+ });
+
it.each`
hasIncompatibleRepos | shouldRenderSlot | action
${false} | ${false} | ${'does not render'}
diff --git a/spec/frontend/import_projects/components/imported_project_table_row_spec.js b/spec/frontend/import_projects/components/imported_project_table_row_spec.js
index 700dd1e025a..8890c352826 100644
--- a/spec/frontend/import_projects/components/imported_project_table_row_spec.js
+++ b/spec/frontend/import_projects/components/imported_project_table_row_spec.js
@@ -1,57 +1,44 @@
-import Vuex from 'vuex';
-import { createLocalVue, mount } from '@vue/test-utils';
-import createStore from '~/import_projects/store';
-import importedProjectTableRow from '~/import_projects/components/imported_project_table_row.vue';
-import STATUS_MAP from '~/import_projects/constants';
+import { mount } from '@vue/test-utils';
+import ImportedProjectTableRow from '~/import_projects/components/imported_project_table_row.vue';
+import ImportStatus from '~/import_projects/components/import_status.vue';
+import { STATUSES } from '~/import_projects/constants';
describe('ImportedProjectTableRow', () => {
- let vm;
+ let wrapper;
const project = {
- id: 1,
- fullPath: 'fullPath',
- importStatus: 'finished',
- providerLink: 'providerLink',
- importSource: 'importSource',
+ importSource: {
+ fullName: 'fullName',
+ providerLink: 'providerLink',
+ },
+ importedProject: {
+ id: 1,
+ fullPath: 'fullPath',
+ importSource: 'importSource',
+ },
+ importStatus: STATUSES.FINISHED,
};
function mountComponent() {
- const localVue = createLocalVue();
- localVue.use(Vuex);
-
- const component = mount(importedProjectTableRow, {
- localVue,
- store: createStore(),
- propsData: {
- project: {
- ...project,
- },
- },
- });
-
- return component.vm;
+ wrapper = mount(ImportedProjectTableRow, { propsData: { project } });
}
beforeEach(() => {
- vm = mountComponent();
+ mountComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders an imported project table row', () => {
- const providerLink = vm.$el.querySelector('.js-provider-link');
- const statusObject = STATUS_MAP[project.importStatus];
-
- expect(vm.$el.classList.contains('js-imported-project')).toBe(true);
- expect(providerLink.href).toMatch(project.providerLink);
- expect(providerLink.textContent).toMatch(project.importSource);
- expect(vm.$el.querySelector('.js-full-path').textContent).toMatch(project.fullPath);
- expect(vm.$el.querySelector(`.${statusObject.textClass}`).textContent).toMatch(
- statusObject.text,
+ const providerLink = wrapper.find('[data-testid=providerLink]');
+
+ expect(providerLink.attributes().href).toMatch(project.importSource.providerLink);
+ expect(providerLink.text()).toMatch(project.importSource.fullName);
+ expect(wrapper.find('[data-testid=fullPath]').text()).toMatch(project.importedProject.fullPath);
+ expect(wrapper.find(ImportStatus).props().status).toBe(project.importStatus);
+    expect(wrapper.find('[data-testid=goToProject]').attributes().href).toMatch(
+ project.importedProject.fullPath,
);
-
- expect(vm.$el.querySelector(`.ic-status_${statusObject.icon}`)).not.toBeNull();
- expect(vm.$el.querySelector('.js-go-to-project').href).toMatch(project.fullPath);
});
});
diff --git a/spec/frontend/import_projects/components/page_query_param_sync_spec.js b/spec/frontend/import_projects/components/page_query_param_sync_spec.js
new file mode 100644
index 00000000000..be19ecca1ba
--- /dev/null
+++ b/spec/frontend/import_projects/components/page_query_param_sync_spec.js
@@ -0,0 +1,87 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { TEST_HOST } from 'helpers/test_constants';
+
+import PageQueryParamSync from '~/import_projects/components/page_query_param_sync.vue';
+
+describe('PageQueryParamSync', () => {
+ let originalPushState;
+ let originalAddEventListener;
+ let originalRemoveEventListener;
+
+ const pushStateMock = jest.fn();
+ const addEventListenerMock = jest.fn();
+ const removeEventListenerMock = jest.fn();
+
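+  // Replace history.pushState and the window event listeners with jest mocks so URL updates and popstate wiring can be asserted; the originals are restored in afterAll.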
+ beforeAll(() => {
+ window.location.search = '';
+    originalPushState = window.history.pushState;
+
+ window.history.pushState = pushStateMock;
+
+ originalAddEventListener = window.addEventListener;
+ window.addEventListener = addEventListenerMock;
+
+ originalRemoveEventListener = window.removeEventListener;
+ window.removeEventListener = removeEventListenerMock;
+ });
+
+ afterAll(() => {
+ window.history.pushState = originalPushState;
+ window.addEventListener = originalAddEventListener;
+ window.removeEventListener = originalRemoveEventListener;
+ });
+
+ let wrapper;
+ beforeEach(() => {
+ wrapper = shallowMount(PageQueryParamSync, {
+ propsData: { page: 3 },
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('calls push state with page number when page is updated and differs from 1', async () => {
+ wrapper.setProps({ page: 2 });
+
+ await nextTick();
+
+ const { calls } = pushStateMock.mock;
+ expect(calls).toHaveLength(1);
+ expect(calls[0][2]).toBe(`${TEST_HOST}/?page=2`);
+ });
+
+ it('calls push state without page number when page is updated and is 1', async () => {
+ wrapper.setProps({ page: 1 });
+
+ await nextTick();
+
+ const { calls } = pushStateMock.mock;
+ expect(calls).toHaveLength(1);
+ expect(calls[0][2]).toBe(`${TEST_HOST}/`);
+ });
+
+ it('subscribes to popstate event on create', () => {
+ expect(addEventListenerMock).toHaveBeenCalledWith('popstate', expect.any(Function));
+ });
+
+ it('unsubscribes from popstate event when destroyed', () => {
+ const [, fn] = addEventListenerMock.mock.calls[0];
+
+ wrapper.destroy();
+
+ expect(removeEventListenerMock).toHaveBeenCalledWith('popstate', fn);
+ });
+
+ it('emits popstate event when popstate is triggered', async () => {
+ const [, fn] = addEventListenerMock.mock.calls[0];
+
+ delete window.location;
+ window.location = new URL(`${TEST_HOST}/?page=5`);
+ fn();
+
+ expect(wrapper.emitted().popstate[0]).toStrictEqual([5]);
+ });
+});
diff --git a/spec/frontend/import_projects/components/provider_repo_table_row_spec.js b/spec/frontend/import_projects/components/provider_repo_table_row_spec.js
index f5e5141eac8..bd9cd07db78 100644
--- a/spec/frontend/import_projects/components/provider_repo_table_row_spec.js
+++ b/spec/frontend/import_projects/components/provider_repo_table_row_spec.js
@@ -1,100 +1,100 @@
+import { nextTick } from 'vue';
import Vuex from 'vuex';
-import { createLocalVue, mount } from '@vue/test-utils';
-import { state, actions, getters, mutations } from '~/import_projects/store';
-import providerRepoTableRow from '~/import_projects/components/provider_repo_table_row.vue';
-import STATUS_MAP, { STATUSES } from '~/import_projects/constants';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import ProviderRepoTableRow from '~/import_projects/components/provider_repo_table_row.vue';
+import ImportStatus from '~/import_projects/components/import_status.vue';
+import { STATUSES } from '~/import_projects/constants';
+import Select2Select from '~/vue_shared/components/select2_select.vue';
describe('ProviderRepoTableRow', () => {
- let vm;
+ let wrapper;
const fetchImport = jest.fn();
- const importPath = '/import-path';
- const defaultTargetNamespace = 'user';
- const ciCdOnly = true;
+ const setImportTarget = jest.fn();
+ const fakeImportTarget = {
+ targetNamespace: 'target',
+ newName: 'newName',
+ };
+ const ciCdOnly = false;
const repo = {
- id: 10,
- sanitizedName: 'sanitizedName',
- fullName: 'fullName',
- providerLink: 'providerLink',
+ importSource: {
+ id: 'remote-1',
+ fullName: 'fullName',
+ providerLink: 'providerLink',
+ },
+ importedProject: {
+ id: 1,
+ fullPath: 'fullPath',
+ importSource: 'importSource',
+ },
+ importStatus: STATUSES.FINISHED,
};
- function initStore(initialState) {
- const stubbedActions = { ...actions, fetchImport };
+ const availableNamespaces = [
+ { text: 'Groups', children: [{ id: 'test', text: 'test' }] },
+ { text: 'Users', children: [{ id: 'root', text: 'root' }] },
+ ];
+ function initStore(initialState) {
const store = new Vuex.Store({
- state: { ...state(), ...initialState },
- actions: stubbedActions,
- mutations,
- getters,
+ state: initialState,
+ getters: {
+ getImportTarget: () => () => fakeImportTarget,
+ },
+ actions: { fetchImport, setImportTarget },
});
return store;
}
+ const findImportButton = () =>
+ wrapper
+ .findAll('button')
+ .filter(node => node.text() === 'Import')
+ .at(0);
+
function mountComponent(initialState) {
const localVue = createLocalVue();
localVue.use(Vuex);
- const store = initStore({ importPath, defaultTargetNamespace, ciCdOnly, ...initialState });
+ const store = initStore({ ciCdOnly, ...initialState });
- const component = mount(providerRepoTableRow, {
+ wrapper = shallowMount(ProviderRepoTableRow, {
localVue,
store,
- propsData: {
- repo,
- },
+ propsData: { repo, availableNamespaces },
});
-
- return component.vm;
}
beforeEach(() => {
- vm = mountComponent();
+ mountComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders a provider repo table row', () => {
- const providerLink = vm.$el.querySelector('.js-provider-link');
- const statusObject = STATUS_MAP[STATUSES.NONE];
-
- expect(vm.$el.classList.contains('js-provider-repo')).toBe(true);
- expect(providerLink.href).toMatch(repo.providerLink);
- expect(providerLink.textContent).toMatch(repo.fullName);
- expect(vm.$el.querySelector(`.${statusObject.textClass}`).textContent).toMatch(
- statusObject.text,
- );
-
- expect(vm.$el.querySelector(`.ic-status_${statusObject.icon}`)).not.toBeNull();
- expect(vm.$el.querySelector('.js-import-button')).not.toBeNull();
+ const providerLink = wrapper.find('[data-testid=providerLink]');
+
+ expect(providerLink.attributes().href).toMatch(repo.importSource.providerLink);
+ expect(providerLink.text()).toMatch(repo.importSource.fullName);
+ expect(wrapper.find(ImportStatus).props().status).toBe(repo.importStatus);
+ expect(wrapper.contains('button')).toBe(true);
});
it('renders a select2 namespace select', () => {
- const dropdownTrigger = vm.$el.querySelector('.js-namespace-select');
-
- expect(dropdownTrigger).not.toBeNull();
- expect(dropdownTrigger.classList.contains('select2-container')).toBe(true);
-
- dropdownTrigger.click();
-
- expect(vm.$el.querySelector('.select2-drop')).not.toBeNull();
+ expect(wrapper.contains(Select2Select)).toBe(true);
+ expect(wrapper.find(Select2Select).props().options.data).toBe(availableNamespaces);
});
- it('imports repo when clicking import button', () => {
- vm.$el.querySelector('.js-import-button').click();
+ it('imports repo when clicking import button', async () => {
+ findImportButton().trigger('click');
- return vm.$nextTick().then(() => {
- const { calls } = fetchImport.mock;
+ await nextTick();
- // Not using .toBeCalledWith because it expects
- // an unmatchable and undefined 3rd argument.
- expect(calls.length).toBe(1);
- expect(calls[0][1]).toEqual({
- repo,
- newName: repo.sanitizedName,
- targetNamespace: defaultTargetNamespace,
- });
- });
+ const { calls } = fetchImport.mock;
+
+ expect(calls).toHaveLength(1);
+ expect(calls[0][1]).toBe(repo.importSource.id);
});
});
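In the spec above, getImportTarget is stubbed as a getter that returns a function, which is Vuex's usual pattern for parameterized getters: each row calls it with its own repo id. A minimal sketch of how a component might consume such a getter (a hypothetical mapGetters-style wiring, not necessarily what provider_repo_table_row.vue actually does):

// Hypothetical consumer of a parameterized getter; shown only to illustrate the
// pattern the stub above relies on.
import { mapGetters } from 'vuex';

export default {
  props: {
    repo: { type: Object, required: true },
  },
  computed: {
    ...mapGetters(['getImportTarget']),
    importTarget() {
      // Calling the getter with this row's repo id yields { newName, targetNamespace }.
      return this.getImportTarget(this.repo.importSource.id);
    },
  },
};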
diff --git a/spec/frontend/import_projects/store/actions_spec.js b/spec/frontend/import_projects/store/actions_spec.js
index fd6fbcbfce0..45a59b3f6d6 100644
--- a/spec/frontend/import_projects/store/actions_spec.js
+++ b/spec/frontend/import_projects/store/actions_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
-import createFlash from '~/flash';
import testAction from 'helpers/vuex_action_helper';
import { TEST_HOST } from 'helpers/test_constants';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import {
@@ -12,41 +12,79 @@ import {
RECEIVE_IMPORT_SUCCESS,
RECEIVE_IMPORT_ERROR,
RECEIVE_JOBS_SUCCESS,
+ REQUEST_NAMESPACES,
+ RECEIVE_NAMESPACES_SUCCESS,
+ RECEIVE_NAMESPACES_ERROR,
+ SET_PAGE,
} from '~/import_projects/store/mutation_types';
-import {
- fetchRepos,
- fetchImport,
- receiveJobsSuccess,
- fetchJobs,
- clearJobsEtagPoll,
- stopJobsPolling,
-} from '~/import_projects/store/actions';
+import actionsFactory from '~/import_projects/store/actions';
+import { getImportTarget } from '~/import_projects/store/getters';
import state from '~/import_projects/store/state';
+import { STATUSES } from '~/import_projects/constants';
jest.mock('~/flash');
+const MOCK_ENDPOINT = `${TEST_HOST}/endpoint.json`;
+const endpoints = {
+ reposPath: MOCK_ENDPOINT,
+ importPath: MOCK_ENDPOINT,
+ jobsPath: MOCK_ENDPOINT,
+ namespacesPath: MOCK_ENDPOINT,
+};
+
+const {
+ clearJobsEtagPoll,
+ stopJobsPolling,
+ importAll,
+ fetchRepos,
+ fetchImport,
+ fetchJobs,
+ fetchNamespaces,
+ setPage,
+} = actionsFactory({
+ endpoints,
+});
+
describe('import_projects store actions', () => {
let localState;
- const repos = [{ id: 1 }, { id: 2 }];
- const importPayload = { newName: 'newName', targetNamespace: 'targetNamespace', repo: { id: 1 } };
+ const importRepoId = 1;
+ const otherImportRepoId = 2;
+ const defaultTargetNamespace = 'default';
+ const sanitizedName = 'sanitizedName';
+ const defaultImportTarget = { newName: sanitizedName, targetNamespace: defaultTargetNamespace };
beforeEach(() => {
- localState = state();
+ localState = {
+ ...state(),
+ defaultTargetNamespace,
+ repositories: [
+ { importSource: { id: importRepoId, sanitizedName }, importStatus: STATUSES.NONE },
+ {
+ importSource: { id: otherImportRepoId, sanitizedName: 's2' },
+ importStatus: STATUSES.NONE,
+ },
+ {
+ importSource: { id: 3, sanitizedName: 's3', incompatible: true },
+ importStatus: STATUSES.NONE,
+ },
+ ],
+ };
+
+ localState.getImportTarget = getImportTarget(localState);
});
describe('fetchRepos', () => {
let mock;
- const payload = { imported_projects: [{}], provider_repos: [{}], namespaces: [{}] };
+ const payload = { imported_projects: [{}], provider_repos: [{}] };
beforeEach(() => {
- localState.reposPath = `${TEST_HOST}/endpoint.json`;
mock = new MockAdapter(axios);
});
afterEach(() => mock.restore());
it('dispatches stopJobsPolling actions and commits REQUEST_REPOS, RECEIVE_REPOS_SUCCESS mutations on a successful request', () => {
- mock.onGet(`${TEST_HOST}/endpoint.json`).reply(200, payload);
+ mock.onGet(MOCK_ENDPOINT).reply(200, payload);
return testAction(
fetchRepos,
@@ -64,7 +102,7 @@ describe('import_projects store actions', () => {
});
it('dispatches stopJobsPolling action and commits REQUEST_REPOS, RECEIVE_REPOS_ERROR mutations on an unsuccessful request', () => {
- mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
+ mock.onGet(MOCK_ENDPOINT).reply(500);
return testAction(
fetchRepos,
@@ -75,18 +113,39 @@ describe('import_projects store actions', () => {
);
});
- describe('when filtered', () => {
- beforeEach(() => {
- localState.filter = 'filter';
+ describe('when pagination is enabled', () => {
+ it('includes page in url query params', async () => {
+ const { fetchRepos: fetchReposWithPagination } = actionsFactory({
+ endpoints,
+ hasPagination: true,
+ });
+
+ let requestedUrl;
+ mock.onGet().reply(config => {
+ requestedUrl = config.url;
+ return [200, payload];
+ });
+
+ await testAction(
+ fetchReposWithPagination,
+ null,
+ localState,
+ expect.any(Array),
+ expect.any(Array),
+ );
+
+ expect(requestedUrl).toBe(`${MOCK_ENDPOINT}?page=${localState.pageInfo.page}`);
});
+ });
+ describe('when filtered', () => {
it('fetches repos with filter applied', () => {
mock.onGet(`${TEST_HOST}/endpoint.json?filter=filter`).reply(200, payload);
return testAction(
fetchRepos,
null,
- localState,
+ { ...localState, filter: 'filter' },
[
{ type: REQUEST_REPOS },
{
@@ -104,7 +163,6 @@ describe('import_projects store actions', () => {
let mock;
beforeEach(() => {
- localState.importPath = `${TEST_HOST}/endpoint.json`;
mock = new MockAdapter(axios);
});
@@ -112,15 +170,17 @@ describe('import_projects store actions', () => {
it('commits REQUEST_IMPORT and REQUEST_IMPORT_SUCCESS mutations on a successful request', () => {
const importedProject = { name: 'imported/project' };
- const importRepoId = importPayload.repo.id;
- mock.onPost(`${TEST_HOST}/endpoint.json`).reply(200, importedProject);
+ mock.onPost(MOCK_ENDPOINT).reply(200, importedProject);
return testAction(
fetchImport,
- importPayload,
+ importRepoId,
localState,
[
- { type: REQUEST_IMPORT, payload: importRepoId },
+ {
+ type: REQUEST_IMPORT,
+ payload: { repoId: importRepoId, importTarget: defaultImportTarget },
+ },
{
type: RECEIVE_IMPORT_SUCCESS,
payload: {
@@ -134,15 +194,18 @@ describe('import_projects store actions', () => {
});
it('commits REQUEST_IMPORT and RECEIVE_IMPORT_ERROR and shows generic error message on an unsuccessful request', async () => {
- mock.onPost(`${TEST_HOST}/endpoint.json`).reply(500);
+ mock.onPost(MOCK_ENDPOINT).reply(500);
await testAction(
fetchImport,
- importPayload,
+ importRepoId,
localState,
[
- { type: REQUEST_IMPORT, payload: importPayload.repo.id },
- { type: RECEIVE_IMPORT_ERROR, payload: importPayload.repo.id },
+ {
+ type: REQUEST_IMPORT,
+ payload: { repoId: importRepoId, importTarget: defaultImportTarget },
+ },
+ { type: RECEIVE_IMPORT_ERROR, payload: importRepoId },
],
[],
);
@@ -152,15 +215,18 @@ describe('import_projects store actions', () => {
it('commits REQUEST_IMPORT and RECEIVE_IMPORT_ERROR and shows detailed error message on an unsuccessful request with errors fields in response', async () => {
const ERROR_MESSAGE = 'dummy';
- mock.onPost(`${TEST_HOST}/endpoint.json`).reply(500, { errors: ERROR_MESSAGE });
+ mock.onPost(MOCK_ENDPOINT).reply(500, { errors: ERROR_MESSAGE });
await testAction(
fetchImport,
- importPayload,
+ importRepoId,
localState,
[
- { type: REQUEST_IMPORT, payload: importPayload.repo.id },
- { type: RECEIVE_IMPORT_ERROR, payload: importPayload.repo.id },
+ {
+ type: REQUEST_IMPORT,
+ payload: { repoId: importRepoId, importTarget: defaultImportTarget },
+ },
+ { type: RECEIVE_IMPORT_ERROR, payload: importRepoId },
],
[],
);
@@ -169,24 +235,11 @@ describe('import_projects store actions', () => {
});
});
- describe('receiveJobsSuccess', () => {
- it(`commits ${RECEIVE_JOBS_SUCCESS} mutation`, () => {
- return testAction(
- receiveJobsSuccess,
- repos,
- localState,
- [{ type: RECEIVE_JOBS_SUCCESS, payload: repos }],
- [],
- );
- });
- });
-
describe('fetchJobs', () => {
let mock;
const updatedProjects = [{ name: 'imported/project' }, { name: 'provider/repo' }];
beforeEach(() => {
- localState.jobsPath = `${TEST_HOST}/endpoint.json`;
mock = new MockAdapter(axios);
});
@@ -198,7 +251,7 @@ describe('import_projects store actions', () => {
afterEach(() => mock.restore());
it('commits RECEIVE_JOBS_SUCCESS mutation on a successful request', async () => {
- mock.onGet(`${TEST_HOST}/endpoint.json`).reply(200, updatedProjects);
+ mock.onGet(MOCK_ENDPOINT).reply(200, updatedProjects);
await testAction(
fetchJobs,
@@ -237,4 +290,78 @@ describe('import_projects store actions', () => {
});
});
});
+
+ describe('fetchNamespaces', () => {
+ let mock;
+ const namespaces = [{ full_name: 'test/ns1' }, { full_name: 'test_ns2' }];
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => mock.restore());
+
+ it('commits REQUEST_NAMESPACES and RECEIVE_NAMESPACES_SUCCESS on success', async () => {
+ mock.onGet(MOCK_ENDPOINT).reply(200, namespaces);
+
+ await testAction(
+ fetchNamespaces,
+ null,
+ localState,
+ [
+ { type: REQUEST_NAMESPACES },
+ {
+ type: RECEIVE_NAMESPACES_SUCCESS,
+ payload: convertObjectPropsToCamelCase(namespaces, { deep: true }),
+ },
+ ],
+ [],
+ );
+ });
+
+ it('commits REQUEST_NAMESPACES and RECEIVE_NAMESPACES_ERROR and shows generic error message on an unsuccessful request', async () => {
+ mock.onGet(MOCK_ENDPOINT).reply(500);
+
+ await testAction(
+ fetchNamespaces,
+ null,
+ localState,
+ [{ type: REQUEST_NAMESPACES }, { type: RECEIVE_NAMESPACES_ERROR }],
+ [],
+ );
+
+ expect(createFlash).toHaveBeenCalledWith('Requesting namespaces failed');
+ });
+ });
+
+ describe('importAll', () => {
+ it('dispatches multiple fetchImport actions', async () => {
+ await testAction(
+ importAll,
+ null,
+ localState,
+ [],
+ [
+ { type: 'fetchImport', payload: importRepoId },
+ { type: 'fetchImport', payload: otherImportRepoId },
+ ],
+ );
+ });
+  });
+
+  describe('setPage', () => {
+    it('dispatches fetchRepos and commits SET_PAGE when page number differs from current one', async () => {
+      await testAction(
+        setPage,
+        2,
+        { ...localState, pageInfo: { page: 1 } },
+        [{ type: SET_PAGE, payload: 2 }],
+        [{ type: 'fetchRepos' }],
+      );
+    });
+
+    it('does not perform any action if page equals the current one', async () => {
+      await testAction(setPage, 2, { ...localState, pageInfo: { page: 2 } }, [], []);
+    });
+  });
});
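The fetchNamespaces cases above pin down the action's observable behaviour; a sketch reconstructed from those expectations (an illustration, not the actual code in ~/import_projects/store/actions.js) could look like:

// Illustrative only: the mutation names, flash message and camelCase conversion
// below mirror what the spec asserts.
import axios from '~/lib/utils/axios_utils';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import {
  REQUEST_NAMESPACES,
  RECEIVE_NAMESPACES_SUCCESS,
  RECEIVE_NAMESPACES_ERROR,
} from '~/import_projects/store/mutation_types';

const createFetchNamespaces = namespacesPath => ({ commit }) => {
  commit(REQUEST_NAMESPACES);
  return axios
    .get(namespacesPath)
    .then(({ data }) =>
      commit(RECEIVE_NAMESPACES_SUCCESS, convertObjectPropsToCamelCase(data, { deep: true })),
    )
    .catch(() => {
      createFlash('Requesting namespaces failed');
      commit(RECEIVE_NAMESPACES_ERROR);
    });
};

export default createFetchNamespaces;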
diff --git a/spec/frontend/import_projects/store/getters_spec.js b/spec/frontend/import_projects/store/getters_spec.js
index 93d1ed89783..5c1ea25a684 100644
--- a/spec/frontend/import_projects/store/getters_spec.js
+++ b/spec/frontend/import_projects/store/getters_spec.js
@@ -1,12 +1,28 @@
import {
- namespaceSelectOptions,
+ isLoading,
isImportingAnyRepo,
- hasProviderRepos,
hasIncompatibleRepos,
- hasImportedProjects,
+ hasImportableRepos,
+ getImportTarget,
} from '~/import_projects/store/getters';
+import { STATUSES } from '~/import_projects/constants';
import state from '~/import_projects/store/state';
+const IMPORTED_REPO = {
+ importSource: {},
+ importedProject: { fullPath: 'some/path' },
+};
+
+const IMPORTABLE_REPO = {
+ importSource: { id: 'some-id', sanitizedName: 'sanitized' },
+ importedProject: null,
+ importStatus: STATUSES.NONE,
+};
+
+const INCOMPATIBLE_REPO = {
+ importSource: { incompatible: true },
+};
+
describe('import_projects store getters', () => {
let localState;
@@ -14,85 +30,87 @@ describe('import_projects store getters', () => {
localState = state();
});
- describe('namespaceSelectOptions', () => {
- const namespaces = [{ fullPath: 'namespace-0' }, { fullPath: 'namespace-1' }];
- const defaultTargetNamespace = 'current-user';
-
- it('returns an options array with a "Users" and "Groups" optgroups', () => {
- localState.namespaces = namespaces;
- localState.defaultTargetNamespace = defaultTargetNamespace;
-
- const optionsArray = namespaceSelectOptions(localState);
- const groupsGroup = optionsArray[0];
- const usersGroup = optionsArray[1];
-
- expect(groupsGroup.text).toBe('Groups');
- expect(usersGroup.text).toBe('Users');
-
- groupsGroup.children.forEach((child, index) => {
- expect(child.id).toBe(namespaces[index].fullPath);
- expect(child.text).toBe(namespaces[index].fullPath);
+ it.each`
+ isLoadingRepos | isLoadingNamespaces | isLoadingValue
+ ${false} | ${false} | ${false}
+ ${true} | ${false} | ${true}
+ ${false} | ${true} | ${true}
+ ${true} | ${true} | ${true}
+ `(
+ 'isLoading returns $isLoadingValue when isLoadingRepos is $isLoadingRepos and isLoadingNamespaces is $isLoadingNamespaces',
+ ({ isLoadingRepos, isLoadingNamespaces, isLoadingValue }) => {
+ Object.assign(localState, {
+ isLoadingRepos,
+ isLoadingNamespaces,
});
- expect(usersGroup.children.length).toBe(1);
- expect(usersGroup.children[0].id).toBe(defaultTargetNamespace);
- expect(usersGroup.children[0].text).toBe(defaultTargetNamespace);
- });
- });
-
- describe('isImportingAnyRepo', () => {
- it('returns true if there are any reposBeingImported', () => {
- localState.reposBeingImported = new Array(1);
-
- expect(isImportingAnyRepo(localState)).toBe(true);
- });
+ expect(isLoading(localState)).toBe(isLoadingValue);
+ },
+ );
+
+ it.each`
+ importStatus | value
+ ${STATUSES.NONE} | ${false}
+ ${STATUSES.SCHEDULING} | ${true}
+ ${STATUSES.SCHEDULED} | ${true}
+ ${STATUSES.STARTED} | ${true}
+ ${STATUSES.FINISHED} | ${false}
+ `(
+ 'isImportingAnyRepo returns $value when repo with $importStatus status is available',
+ ({ importStatus, value }) => {
+ localState.repositories = [{ importStatus }];
+
+ expect(isImportingAnyRepo(localState)).toBe(value);
+ },
+ );
- it('returns false if there are no reposBeingImported', () => {
- localState.reposBeingImported = [];
-
- expect(isImportingAnyRepo(localState)).toBe(false);
- });
- });
-
- describe('hasProviderRepos', () => {
- it('returns true if there are any providerRepos', () => {
- localState.providerRepos = new Array(1);
+ describe('hasIncompatibleRepos', () => {
+ it('returns true if there are any incompatible projects', () => {
+ localState.repositories = [IMPORTABLE_REPO, IMPORTED_REPO, INCOMPATIBLE_REPO];
- expect(hasProviderRepos(localState)).toBe(true);
+ expect(hasIncompatibleRepos(localState)).toBe(true);
});
- it('returns false if there are no providerRepos', () => {
- localState.providerRepos = [];
+ it('returns false if there are no incompatible projects', () => {
+ localState.repositories = [IMPORTABLE_REPO, IMPORTED_REPO];
- expect(hasProviderRepos(localState)).toBe(false);
+ expect(hasIncompatibleRepos(localState)).toBe(false);
});
});
- describe('hasImportedProjects', () => {
- it('returns true if there are any importedProjects', () => {
- localState.importedProjects = new Array(1);
+ describe('hasImportableRepos', () => {
+    it('returns true if there are any importable projects', () => {
+ localState.repositories = [IMPORTABLE_REPO, IMPORTED_REPO, INCOMPATIBLE_REPO];
- expect(hasImportedProjects(localState)).toBe(true);
+ expect(hasImportableRepos(localState)).toBe(true);
});
- it('returns false if there are no importedProjects', () => {
- localState.importedProjects = [];
+ it('returns false if there are no importable projects', () => {
+ localState.repositories = [IMPORTED_REPO, INCOMPATIBLE_REPO];
- expect(hasImportedProjects(localState)).toBe(false);
+ expect(hasImportableRepos(localState)).toBe(false);
});
});
- describe('hasIncompatibleRepos', () => {
- it('returns true if there are any incompatibleProjects', () => {
- localState.incompatibleRepos = new Array(1);
+ describe('getImportTarget', () => {
+ it('returns default value if no custom target available', () => {
+ localState.defaultTargetNamespace = 'default';
+ localState.repositories = [IMPORTABLE_REPO];
- expect(hasIncompatibleRepos(localState)).toBe(true);
+ expect(getImportTarget(localState)(IMPORTABLE_REPO.importSource.id)).toStrictEqual({
+ newName: IMPORTABLE_REPO.importSource.sanitizedName,
+ targetNamespace: localState.defaultTargetNamespace,
+ });
});
- it('returns false if there are no incompatibleProjects', () => {
- localState.incompatibleRepos = [];
+ it('returns custom import target if available', () => {
+ const fakeTarget = { newName: 'something', targetNamespace: 'ns' };
+ localState.repositories = [IMPORTABLE_REPO];
+ localState.customImportTargets[IMPORTABLE_REPO.importSource.id] = fakeTarget;
- expect(hasIncompatibleRepos(localState)).toBe(false);
+ expect(getImportTarget(localState)(IMPORTABLE_REPO.importSource.id)).toStrictEqual(
+ fakeTarget,
+ );
});
});
});
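Taken together, the expectations above describe getters roughly like the following sketch (reconstructed from the assertions; the real ~/import_projects/store/getters.js may differ in detail):

// Sketch derived from this spec, not copied from the source module.
import { STATUSES } from '~/import_projects/constants';
import { isProjectImportable } from '~/import_projects/utils';

export const isLoading = state => state.isLoadingRepos || state.isLoadingNamespaces;

export const isImportingAnyRepo = state =>
  state.repositories.some(repo =>
    [STATUSES.SCHEDULING, STATUSES.SCHEDULED, STATUSES.STARTED].includes(repo.importStatus),
  );

export const hasIncompatibleRepos = state =>
  state.repositories.some(repo => repo.importSource.incompatible);

export const hasImportableRepos = state => state.repositories.some(isProjectImportable);

export const getImportTarget = state => repoId => {
  // Prefer a target the user picked for this repo; otherwise fall back to defaults.
  if (state.customImportTargets[repoId]) {
    return state.customImportTargets[repoId];
  }
  const repo = state.repositories.find(r => r.importSource.id === repoId);
  return {
    newName: repo.importSource.sanitizedName,
    targetNamespace: state.defaultTargetNamespace,
  };
};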
diff --git a/spec/frontend/import_projects/store/mutations_spec.js b/spec/frontend/import_projects/store/mutations_spec.js
index 505545f7aa5..3672ec9f2c0 100644
--- a/spec/frontend/import_projects/store/mutations_spec.js
+++ b/spec/frontend/import_projects/store/mutations_spec.js
@@ -1,34 +1,303 @@
import * as types from '~/import_projects/store/mutation_types';
import mutations from '~/import_projects/store/mutations';
+import { STATUSES } from '~/import_projects/constants';
describe('import_projects store mutations', () => {
- describe(`${types.RECEIVE_IMPORT_SUCCESS}`, () => {
- it('removes repoId from reposBeingImported and providerRepos, adds to importedProjects', () => {
- const repoId = 1;
- const state = {
- reposBeingImported: [repoId],
- providerRepos: [{ id: repoId }],
+ let state;
+ const SOURCE_PROJECT = {
+ id: 1,
+ full_name: 'full/name',
+ sanitized_name: 'name',
+ provider_link: 'https://demo.link/full/name',
+ };
+ const IMPORTED_PROJECT = {
+ name: 'demo',
+ importSource: 'something',
+ providerLink: 'custom-link',
+ importStatus: 'status',
+ fullName: 'fullName',
+ };
+
+ describe(`${types.SET_FILTER}`, () => {
+ it('overwrites current filter value', () => {
+ state = { filter: 'some-value' };
+ const NEW_VALUE = 'new-value';
+
+ mutations[types.SET_FILTER](state, NEW_VALUE);
+
+ expect(state.filter).toBe(NEW_VALUE);
+ });
+ });
+
+ describe(`${types.REQUEST_REPOS}`, () => {
+ it('sets repos loading flag to true', () => {
+ state = {};
+
+ mutations[types.REQUEST_REPOS](state);
+
+ expect(state.isLoadingRepos).toBe(true);
+ });
+ });
+
+ describe(`${types.RECEIVE_REPOS_SUCCESS}`, () => {
+ describe('for imported projects', () => {
+ const response = {
+ importedProjects: [IMPORTED_PROJECT],
+ providerRepos: [],
+ };
+
+ it('picks import status from response', () => {
+ state = {};
+
+ mutations[types.RECEIVE_REPOS_SUCCESS](state, response);
+
+ expect(state.repositories[0].importStatus).toBe(IMPORTED_PROJECT.importStatus);
+ });
+
+ it('recreates importSource from response', () => {
+ state = {};
+
+ mutations[types.RECEIVE_REPOS_SUCCESS](state, response);
+
+ expect(state.repositories[0].importSource).toStrictEqual(
+ expect.objectContaining({
+ fullName: IMPORTED_PROJECT.importSource,
+ sanitizedName: IMPORTED_PROJECT.name,
+ providerLink: IMPORTED_PROJECT.providerLink,
+ }),
+ );
+ });
+
+      it('passes response project to importedProject', () => {
+ state = {};
+
+ mutations[types.RECEIVE_REPOS_SUCCESS](state, response);
+
+ expect(IMPORTED_PROJECT).toStrictEqual(
+ expect.objectContaining(state.repositories[0].importedProject),
+ );
+ });
+ });
+
+ describe('for importable projects', () => {
+ beforeEach(() => {
+ state = {};
+ const response = {
+ importedProjects: [],
+ providerRepos: [SOURCE_PROJECT],
+ };
+ mutations[types.RECEIVE_REPOS_SUCCESS](state, response);
+ });
+
+ it('sets import status to none', () => {
+ expect(state.repositories[0].importStatus).toBe(STATUSES.NONE);
+ });
+
+ it('sets importSource to project', () => {
+ expect(state.repositories[0].importSource).toBe(SOURCE_PROJECT);
+ });
+ });
+
+ describe('for incompatible projects', () => {
+ const response = {
importedProjects: [],
+ providerRepos: [],
+ incompatibleRepos: [SOURCE_PROJECT],
};
- const importedProject = { id: repoId };
- mutations[types.RECEIVE_IMPORT_SUCCESS](state, { importedProject, repoId });
+ beforeEach(() => {
+ state = {};
+ mutations[types.RECEIVE_REPOS_SUCCESS](state, response);
+ });
+
+ it('sets incompatible flag', () => {
+ expect(state.repositories[0].importSource.incompatible).toBe(true);
+ });
+
+ it('sets importSource to project', () => {
+ expect(state.repositories[0].importSource).toStrictEqual(
+ expect.objectContaining(SOURCE_PROJECT),
+ );
+ });
+ });
+
+ it('sets repos loading flag to false', () => {
+ const response = {
+ importedProjects: [],
+ providerRepos: [],
+ };
+ state = {};
+
+ mutations[types.RECEIVE_REPOS_SUCCESS](state, response);
+
+ expect(state.isLoadingRepos).toBe(false);
+ });
+ });
+
+ describe(`${types.RECEIVE_REPOS_ERROR}`, () => {
+ it('sets repos loading flag to false', () => {
+ state = {};
+
+ mutations[types.RECEIVE_REPOS_ERROR](state);
+
+ expect(state.isLoadingRepos).toBe(false);
+ });
+ });
+
+ describe(`${types.REQUEST_IMPORT}`, () => {
+ beforeEach(() => {
+ const REPO_ID = 1;
+ const importTarget = { targetNamespace: 'ns', newName: 'name ' };
+ state = { repositories: [{ importSource: { id: REPO_ID } }] };
+
+ mutations[types.REQUEST_IMPORT](state, { repoId: REPO_ID, importTarget });
+ });
+
+ it(`sets status to ${STATUSES.SCHEDULING}`, () => {
+ expect(state.repositories[0].importStatus).toBe(STATUSES.SCHEDULING);
+ });
+ });
+
+ describe(`${types.RECEIVE_IMPORT_SUCCESS}`, () => {
+ beforeEach(() => {
+ const REPO_ID = 1;
+ state = { repositories: [{ importSource: { id: REPO_ID } }] };
+
+ mutations[types.RECEIVE_IMPORT_SUCCESS](state, {
+ repoId: REPO_ID,
+ importedProject: IMPORTED_PROJECT,
+ });
+ });
- expect(state.reposBeingImported.includes(repoId)).toBe(false);
- expect(state.providerRepos.some(repo => repo.id === repoId)).toBe(false);
- expect(state.importedProjects.some(repo => repo.id === repoId)).toBe(true);
+ it('sets import status', () => {
+ expect(state.repositories[0].importStatus).toBe(IMPORTED_PROJECT.importStatus);
+ });
+
+ it('sets imported project', () => {
+ expect(IMPORTED_PROJECT).toStrictEqual(
+ expect.objectContaining(state.repositories[0].importedProject),
+ );
+ });
+ });
+
+ describe(`${types.RECEIVE_IMPORT_ERROR}`, () => {
+ beforeEach(() => {
+ const REPO_ID = 1;
+ state = { repositories: [{ importSource: { id: REPO_ID } }] };
+
+ mutations[types.RECEIVE_IMPORT_ERROR](state, REPO_ID);
+ });
+
+ it(`resets import status to ${STATUSES.NONE}`, () => {
+ expect(state.repositories[0].importStatus).toBe(STATUSES.NONE);
});
});
describe(`${types.RECEIVE_JOBS_SUCCESS}`, () => {
- it('updates importStatus of existing importedProjects', () => {
+ it('updates import status of existing project', () => {
const repoId = 1;
- const state = { importedProjects: [{ id: repoId, importStatus: 'started' }] };
- const updatedProjects = [{ id: repoId, importStatus: 'finished' }];
+ state = {
+ repositories: [{ importedProject: { id: repoId }, importStatus: STATUSES.STARTED }],
+ };
+ const updatedProjects = [{ id: repoId, importStatus: STATUSES.FINISHED }];
mutations[types.RECEIVE_JOBS_SUCCESS](state, updatedProjects);
- expect(state.importedProjects[0].importStatus).toBe(updatedProjects[0].importStatus);
+ expect(state.repositories[0].importStatus).toBe(updatedProjects[0].importStatus);
+ });
+ });
+
+ describe(`${types.REQUEST_NAMESPACES}`, () => {
+ it('sets namespaces loading flag to true', () => {
+ state = {};
+
+ mutations[types.REQUEST_NAMESPACES](state);
+
+ expect(state.isLoadingNamespaces).toBe(true);
+ });
+ });
+
+ describe(`${types.RECEIVE_NAMESPACES_SUCCESS}`, () => {
+ const response = [{ fullPath: 'some/path' }];
+
+ beforeEach(() => {
+ state = {};
+ mutations[types.RECEIVE_NAMESPACES_SUCCESS](state, response);
+ });
+
+ it('stores namespaces to state', () => {
+ expect(state.namespaces).toStrictEqual(response);
+ });
+
+ it('sets namespaces loading flag to false', () => {
+ expect(state.isLoadingNamespaces).toBe(false);
+ });
+ });
+
+ describe(`${types.RECEIVE_NAMESPACES_ERROR}`, () => {
+ it('sets namespaces loading flag to false', () => {
+ state = {};
+
+ mutations[types.RECEIVE_NAMESPACES_ERROR](state);
+
+ expect(state.isLoadingNamespaces).toBe(false);
+ });
+ });
+
+ describe(`${types.SET_IMPORT_TARGET}`, () => {
+ const PROJECT = {
+ id: 2,
+ sanitizedName: 'sanitizedName',
+ };
+
+ it('stores custom target if it differs from defaults', () => {
+ state = { customImportTargets: {}, repositories: [{ importSource: PROJECT }] };
+ const importTarget = { targetNamespace: 'ns', newName: 'name ' };
+
+ mutations[types.SET_IMPORT_TARGET](state, { repoId: PROJECT.id, importTarget });
+ expect(state.customImportTargets[PROJECT.id]).toBe(importTarget);
+ });
+
+ it('removes custom target if it is equal to defaults', () => {
+ const importTarget = { targetNamespace: 'ns', newName: 'name ' };
+ state = {
+ defaultTargetNamespace: 'default',
+ customImportTargets: {
+ [PROJECT.id]: importTarget,
+ },
+ repositories: [{ importSource: PROJECT }],
+ };
+
+ mutations[types.SET_IMPORT_TARGET](state, {
+ repoId: PROJECT.id,
+ importTarget: {
+ targetNamespace: state.defaultTargetNamespace,
+ newName: PROJECT.sanitizedName,
+ },
+ });
+
+      expect(state.customImportTargets[PROJECT.id]).toBeUndefined();
+ });
+ });
+
+ describe(`${types.SET_PAGE_INFO}`, () => {
+ it('sets passed page info', () => {
+ state = {};
+ const pageInfo = { page: 1, total: 10 };
+
+ mutations[types.SET_PAGE_INFO](state, pageInfo);
+
+ expect(state.pageInfo).toBe(pageInfo);
+ });
+ });
+
+ describe(`${types.SET_PAGE}`, () => {
+ it('sets page number', () => {
+ const NEW_PAGE = 4;
+ state = { pageInfo: { page: 5 } };
+
+ mutations[types.SET_PAGE](state, NEW_PAGE);
+ expect(state.pageInfo.page).toBe(NEW_PAGE);
});
});
});
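The two SET_IMPORT_TARGET cases above imply a mutation along these lines (a sketch inferred from the spec; the real mutation may differ, for example by using Vue.set and Vue.delete for reactivity):

// Sketch only: stores a per-repo target, and drops it again when the caller
// sets it back to the defaults.
import * as types from '~/import_projects/store/mutation_types';

export default {
  [types.SET_IMPORT_TARGET](state, { repoId, importTarget }) {
    const { importSource } = state.repositories.find(r => r.importSource.id === repoId);
    const isDefaultTarget =
      importTarget.newName === importSource.sanitizedName &&
      importTarget.targetNamespace === state.defaultTargetNamespace;

    if (isDefaultTarget) {
      delete state.customImportTargets[repoId];
    } else {
      state.customImportTargets[repoId] = importTarget;
    }
  },
};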
diff --git a/spec/frontend/import_projects/utils_spec.js b/spec/frontend/import_projects/utils_spec.js
new file mode 100644
index 00000000000..826b06d5a70
--- /dev/null
+++ b/spec/frontend/import_projects/utils_spec.js
@@ -0,0 +1,32 @@
+import { isProjectImportable } from '~/import_projects/utils';
+import { STATUSES } from '~/import_projects/constants';
+
+describe('import_projects utils', () => {
+ describe('isProjectImportable', () => {
+ it.each`
+ status | result
+ ${STATUSES.FINISHED} | ${false}
+ ${STATUSES.FAILED} | ${false}
+ ${STATUSES.SCHEDULED} | ${false}
+ ${STATUSES.STARTED} | ${false}
+ ${STATUSES.NONE} | ${true}
+ ${STATUSES.SCHEDULING} | ${false}
+ `('returns $result when project is compatible and status is $status', ({ status, result }) => {
+ expect(
+ isProjectImportable({
+ importStatus: status,
+ importSource: { incompatible: false },
+ }),
+ ).toBe(result);
+ });
+
+ it('returns false if project is not compatible', () => {
+ expect(
+ isProjectImportable({
+ importStatus: STATUSES.NONE,
+ importSource: { incompatible: true },
+ }),
+ ).toBe(false);
+ });
+ });
+});
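The table above fully determines the helper's behaviour; a version consistent with it would be (the actual ~/import_projects/utils.js may be written differently):

// Consistent with the spec above: only compatible repos that have not been
// scheduled, started or finished yet are still importable.
import { STATUSES } from '~/import_projects/constants';

export function isProjectImportable(project) {
  return !project.importSource.incompatible && project.importStatus === STATUSES.NONE;
}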
diff --git a/spec/frontend/incidents/components/incidents_list_spec.js b/spec/frontend/incidents/components/incidents_list_spec.js
new file mode 100644
index 00000000000..33ddd06d6d9
--- /dev/null
+++ b/spec/frontend/incidents/components/incidents_list_spec.js
@@ -0,0 +1,362 @@
+import { mount } from '@vue/test-utils';
+import {
+ GlAlert,
+ GlLoadingIcon,
+ GlTable,
+ GlAvatar,
+ GlPagination,
+ GlSearchBoxByType,
+ GlTab,
+ GlTabs,
+ GlBadge,
+ GlEmptyState,
+} from '@gitlab/ui';
+import { visitUrl, joinPaths, mergeUrlParams } from '~/lib/utils/url_utility';
+import IncidentsList from '~/incidents/components/incidents_list.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import { I18N, INCIDENT_STATUS_TABS } from '~/incidents/constants';
+import mockIncidents from '../mocks/incidents.json';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ visitUrl: jest.fn().mockName('visitUrlMock'),
+ joinPaths: jest.fn().mockName('joinPaths'),
+ mergeUrlParams: jest.fn().mockName('mergeUrlParams'),
+}));
+
+describe('Incidents List', () => {
+ let wrapper;
+ const newIssuePath = 'namespace/project/-/issues/new';
+ const emptyListSvgPath = '/assets/empty.svg';
+ const incidentTemplateName = 'incident';
+ const incidentType = 'incident';
+ const incidentsCount = {
+ opened: 14,
+ closed: 1,
+ all: 16,
+ };
+
+ const findTable = () => wrapper.find(GlTable);
+ const findTableRows = () => wrapper.findAll('table tbody tr');
+ const findAlert = () => wrapper.find(GlAlert);
+ const findLoader = () => wrapper.find(GlLoadingIcon);
+ const findTimeAgo = () => wrapper.findAll(TimeAgoTooltip);
+ const findDateColumnHeader = () =>
+ wrapper.find('[data-testid="incident-management-created-at-sort"]');
+ const findSearch = () => wrapper.find(GlSearchBoxByType);
+  const findAssignees = () => wrapper.findAll('[data-testid="incident-assignees"]');
+ const findCreateIncidentBtn = () => wrapper.find('[data-testid="createIncidentBtn"]');
+ const findClosedIcon = () => wrapper.findAll("[data-testid='incident-closed']");
+ const findPagination = () => wrapper.find(GlPagination);
+ const findStatusFilterTabs = () => wrapper.findAll(GlTab);
+ const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
+ const findStatusTabs = () => wrapper.find(GlTabs);
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+
+ function mountComponent({ data = { incidents: [], incidentsCount: {} }, loading = false }) {
+ wrapper = mount(IncidentsList, {
+ data() {
+ return data;
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ incidents: {
+ loading,
+ },
+ },
+ },
+ },
+ provide: {
+ projectPath: '/project/path',
+ newIssuePath,
+ incidentTemplateName,
+ incidentType,
+        issuePath: '/project/issues',
+ publishedAvailable: true,
+ emptyListSvgPath,
+ },
+ stubs: {
+ GlButton: true,
+ GlAvatar: true,
+ },
+ });
+ }
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ wrapper = null;
+ }
+ });
+
+ it('shows the loading state', () => {
+ mountComponent({
+ loading: true,
+ });
+ expect(findLoader().exists()).toBe(true);
+ });
+
+ it('shows empty state', () => {
+ mountComponent({
+ data: { incidents: { list: [] }, incidentsCount: {} },
+ loading: false,
+ });
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('shows error state', () => {
+ mountComponent({
+ data: { incidents: { list: [] }, incidentsCount: { all: 0 }, errored: true },
+ loading: false,
+ });
+ expect(findTable().text()).toContain(I18N.noIncidents);
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ describe('Incident Management list', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { incidents: { list: mockIncidents }, incidentsCount },
+ loading: false,
+ });
+ });
+
+ it('renders rows based on provided data', () => {
+ expect(findTableRows().length).toBe(mockIncidents.length);
+ });
+
+ it('renders a createdAt with timeAgo component per row', () => {
+ expect(findTimeAgo().length).toBe(mockIncidents.length);
+ });
+
+ describe('Assignees', () => {
+ it('shows Unassigned when there are no assignees', () => {
+ expect(
+        findAssignees()
+ .at(0)
+ .text(),
+ ).toBe(I18N.unassigned);
+ });
+
+ it('renders an avatar component when there is an assignee', () => {
+      const avatar = findAssignees()
+ .at(1)
+ .find(GlAvatar);
+ const { src, label } = avatar.attributes();
+ const { name, avatarUrl } = mockIncidents[1].assignees.nodes[0];
+
+ expect(avatar.exists()).toBe(true);
+ expect(label).toBe(name);
+ expect(src).toBe(avatarUrl);
+ });
+
+ it('contains a link to the issue details', () => {
+ findTableRows()
+ .at(0)
+ .trigger('click');
+      expect(visitUrl).toHaveBeenCalledWith(joinPaths(`/project/issues/`, mockIncidents[0].iid));
+ });
+
+ it('renders a closed icon for closed incidents', () => {
+ expect(findClosedIcon().length).toBe(
+ mockIncidents.filter(({ state }) => state === 'closed').length,
+ );
+ });
+ });
+ });
+
+ describe('Create Incident', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { incidents: { list: mockIncidents }, incidentsCount: {} },
+ loading: false,
+ });
+ });
+
+ it('shows the button linking to new incidents page with prefilled incident template when clicked', () => {
+ expect(findCreateIncidentBtn().exists()).toBe(true);
+ findCreateIncidentBtn().trigger('click');
+ expect(mergeUrlParams).toHaveBeenCalledWith(
+ { issuable_template: incidentTemplateName, 'issue[issue_type]': incidentType },
+ newIssuePath,
+ );
+ });
+
+ it('sets button loading on click', () => {
+ findCreateIncidentBtn().vm.$emit('click');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findCreateIncidentBtn().attributes('loading')).toBe('true');
+ });
+ });
+ });
+
+ describe('Pagination', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: {
+ incidents: {
+ list: mockIncidents,
+ pageInfo: { hasNextPage: true, hasPreviousPage: true },
+ },
+ incidentsCount,
+ errored: false,
+ },
+ loading: false,
+ });
+ });
+
+ it('should render pagination', () => {
+ expect(wrapper.find(GlPagination).exists()).toBe(true);
+ });
+
+ describe('prevPage', () => {
+ it('returns prevPage button', () => {
+ findPagination().vm.$emit('input', 3);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(
+ findPagination()
+ .findAll('.page-item')
+ .at(0)
+ .text(),
+ ).toBe('Prev');
+ });
+ });
+
+ it('returns prevPage number', () => {
+ findPagination().vm.$emit('input', 3);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.prevPage).toBe(2);
+ });
+ });
+
+ it('returns 0 when it is the first page', () => {
+ findPagination().vm.$emit('input', 1);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.prevPage).toBe(0);
+ });
+ });
+ });
+
+ describe('nextPage', () => {
+ it('returns nextPage button', () => {
+ findPagination().vm.$emit('input', 3);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(
+ findPagination()
+ .findAll('.page-item')
+ .at(1)
+ .text(),
+ ).toBe('Next');
+ });
+ });
+
+ it('returns nextPage number', () => {
+ mountComponent({
+ data: {
+ incidents: {
+ list: [...mockIncidents, ...mockIncidents, ...mockIncidents],
+ pageInfo: { hasNextPage: true, hasPreviousPage: true },
+ },
+ incidentsCount,
+ errored: false,
+ },
+ loading: false,
+ });
+ findPagination().vm.$emit('input', 1);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.nextPage).toBe(2);
+ });
+ });
+
+ it('returns `null` when currentPage is already last page', () => {
+ findStatusTabs().vm.$emit('input', 1);
+ findPagination().vm.$emit('input', 1);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.vm.nextPage).toBeNull();
+ });
+ });
+ });
+
+ describe('Search', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: {
+ incidents: {
+ list: mockIncidents,
+ pageInfo: { hasNextPage: true, hasPreviousPage: true },
+ },
+ incidentsCount,
+ errored: false,
+ },
+ loading: false,
+ });
+ });
+
+ it('renders the search component for incidents', () => {
+ expect(findSearch().exists()).toBe(true);
+ });
+
+ it('sets the `searchTerm` graphql variable', () => {
+ const SEARCH_TERM = 'Simple Incident';
+
+ findSearch().vm.$emit('input', SEARCH_TERM);
+
+ expect(wrapper.vm.$data.searchTerm).toBe(SEARCH_TERM);
+ });
+ });
+
+ describe('Status Filter Tabs', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { incidents: mockIncidents, incidentsCount },
+ loading: false,
+ stubs: {
+ GlTab: true,
+ },
+ });
+ });
+
+ it('should display filter tabs', () => {
+ const tabs = findStatusFilterTabs().wrappers;
+
+ tabs.forEach((tab, i) => {
+ expect(tab.attributes('data-testid')).toContain(INCIDENT_STATUS_TABS[i].status);
+ });
+ });
+
+ it('should display filter tabs with alerts count badge for each status', () => {
+ const tabs = findStatusFilterTabs().wrappers;
+ const badges = findStatusFilterBadge();
+
+ tabs.forEach((tab, i) => {
+ const status = INCIDENT_STATUS_TABS[i].status.toLowerCase();
+ expect(tab.attributes('data-testid')).toContain(INCIDENT_STATUS_TABS[i].status);
+ expect(badges.at(i).text()).toContain(incidentsCount[status]);
+ });
+ });
+ });
+ });
+
+ describe('sorting the incident list by column', () => {
+ beforeEach(() => {
+ mountComponent({
+ data: { incidents: mockIncidents, incidentsCount },
+ loading: false,
+ });
+ });
+
+ it('updates sort with new direction and column key', () => {
+ expect(findDateColumnHeader().attributes('aria-sort')).toBe('descending');
+
+ findDateColumnHeader().trigger('click');
+ return wrapper.vm.$nextTick(() => {
+ expect(findDateColumnHeader().attributes('aria-sort')).toBe('ascending');
+ });
+ });
+ });
+});
diff --git a/spec/frontend/incidents/mocks/incidents.json b/spec/frontend/incidents/mocks/incidents.json
new file mode 100644
index 00000000000..4eab709e53f
--- /dev/null
+++ b/spec/frontend/incidents/mocks/incidents.json
@@ -0,0 +1,39 @@
+[
+ {
+ "iid": "15",
+ "title": "New: Incident",
+ "createdAt": "2020-06-03T15:46:08Z",
+ "assignees": {},
+ "state": "opened"
+ },
+ {
+ "iid": "14",
+ "title": "Create issue4",
+ "createdAt": "2020-05-19T09:26:07Z",
+ "assignees": {
+ "nodes": [
+ {
+ "name": "Benjamin Braun",
+ "username": "kami.hegmann",
+ "avatarUrl": "https://invalid'",
+ "webUrl": "https://invalid"
+ }
+ ]
+ },
+ "state": "opened"
+ },
+ {
+ "iid": "13",
+ "title": "Create issue3",
+ "createdAt": "2020-05-19T08:53:55Z",
+ "assignees": {},
+ "state": "closed"
+ },
+ {
+ "iid": "12",
+ "title": "Create issue2",
+ "createdAt": "2020-05-18T17:13:35Z",
+ "assignees": {},
+ "state": "closed"
+ }
+]
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
index dd3589e2951..f3f610e4bb7 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/alerts_form_spec.js.snap
@@ -81,19 +81,23 @@ exports[`Alert integration settings form default state should match the default
</gl-form-checkbox-stub>
</gl-form-group-stub>
- <gl-button-stub
- category="tertiary"
- class="js-no-auto-disable"
- data-qa-selector="save_changes_button"
- icon=""
- size="medium"
- type="submit"
- variant="success"
+ <div
+ class="gl-display-flex gl-justify-content-end"
>
+ <gl-button-stub
+ category="primary"
+ class="js-no-auto-disable"
+ data-qa-selector="save_changes_button"
+ icon=""
+ size="medium"
+ type="submit"
+ variant="success"
+ >
+
+ Save changes
- Save changes
-
- </gl-button-stub>
+ </gl-button-stub>
+ </div>
</form>
</div>
`;
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
index 5f355ee8261..3ad4c13382d 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
@@ -9,16 +9,16 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
<div
class="settings-header"
>
- <h3
- class="h4"
+ <h4
+ class="gl-my-3! gl-py-1"
>
Incidents
- </h3>
+ </h4>
<gl-button-stub
- category="tertiary"
+ category="primary"
class="js-settings-toggle"
icon=""
size="medium"
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
index 17ada722034..78bb238fcb6 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/pagerduty_form_spec.js.snap
@@ -35,27 +35,31 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
/>
<div
- class="gl-text-gray-400 gl-pt-2"
+ class="gl-text-gray-200 gl-pt-2"
>
<gl-sprintf-stub
message="Create a GitLab issue for each PagerDuty incident by %{docsLink}"
/>
</div>
- <gl-button-stub
- category="tertiary"
- class="gl-mt-3"
- data-testid="webhook-reset-btn"
- icon=""
- role="button"
- size="medium"
- tabindex="0"
- variant="default"
+ <div
+ class="gl-display-flex gl-justify-content-end"
>
+ <gl-button-stub
+ category="primary"
+ class="gl-mt-3"
+ data-testid="webhook-reset-btn"
+ icon=""
+ role="button"
+ size="medium"
+ tabindex="0"
+ variant="default"
+ >
+
+ Reset webhook URL
- Reset webhook URL
-
- </gl-button-stub>
+ </gl-button-stub>
+ </div>
<gl-modal-stub
modalclass=""
@@ -72,18 +76,22 @@ exports[`Alert integration settings form should match the default snapshot 1`] =
</gl-modal-stub>
</gl-form-group-stub>
- <gl-button-stub
- category="tertiary"
- class="js-no-auto-disable"
- icon=""
- size="medium"
- type="submit"
- variant="success"
+ <div
+ class="gl-display-flex gl-justify-content-end"
>
+ <gl-button-stub
+ category="primary"
+ class="js-no-auto-disable"
+ icon=""
+ size="medium"
+ type="submit"
+ variant="success"
+ >
+
+ Save changes
- Save changes
-
- </gl-button-stub>
+ </gl-button-stub>
+ </div>
</form>
</div>
`;
diff --git a/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js b/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
index 58f9a318808..5010fc0bb5c 100644
--- a/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
+++ b/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
@@ -1,9 +1,9 @@
-import axios from '~/lib/utils/axios_utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import httpStatusCodes from '~/lib/utils/http_status';
import IncidentsSettingsService from '~/incidents_settings/incidents_settings_service';
import { ERROR_MSG } from '~/incidents_settings/constants';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
jest.mock('~/flash');
diff --git a/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js b/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
index 47e2aecc108..c56b9ed2a69 100644
--- a/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
+++ b/spec/frontend/incidents_settings/components/incidents_settings_tabs_spec.js
@@ -6,9 +6,7 @@ describe('IncidentsSettingTabs', () => {
let wrapper;
beforeEach(() => {
- wrapper = shallowMount(IncidentsSettingTabs, {
- provide: { glFeatures: { pagerdutyWebhook: true } },
- });
+ wrapper = shallowMount(IncidentsSettingTabs);
});
afterEach(() => {
diff --git a/spec/frontend/incidents_settings/components/pagerduty_form_spec.js b/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
index 521094ad54c..50d0de8a753 100644
--- a/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
+++ b/spec/frontend/incidents_settings/components/pagerduty_form_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
-import PagerDutySettingsForm from '~/incidents_settings/components/pagerduty_form.vue';
import { GlAlert, GlModal } from '@gitlab/ui';
+import PagerDutySettingsForm from '~/incidents_settings/components/pagerduty_form.vue';
describe('Alert integration settings form', () => {
let wrapper;
diff --git a/spec/frontend/integrations/edit/components/dynamic_field_spec.js b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
index 3a7a0efcab7..53234419f5f 100644
--- a/spec/frontend/integrations/edit/components/dynamic_field_spec.js
+++ b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
-import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
import { GlFormGroup, GlFormCheckbox, GlFormInput, GlFormSelect, GlFormTextarea } from '@gitlab/ui';
+import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
describe('DynamicField', () => {
let wrapper;
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index 482c6a439f2..f8e2eb5e7f4 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { mockIntegrationProps } from 'jest/integrations/edit/mock_data';
import { createStore } from '~/integrations/edit/store';
import IntegrationForm from '~/integrations/edit/components/integration_form.vue';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
@@ -7,7 +8,6 @@ import JiraTriggerFields from '~/integrations/edit/components/jira_trigger_field
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
-import { mockIntegrationProps } from 'jest/integrations/edit/mock_data';
describe('IntegrationForm', () => {
let wrapper;
diff --git a/spec/frontend/integrations/edit/components/trigger_fields_spec.js b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
index 41bccb8ada0..df12c70f9f5 100644
--- a/spec/frontend/integrations/edit/components/trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
-import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
import { GlFormGroup, GlFormCheckbox, GlFormInput } from '@gitlab/ui';
+import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
describe('TriggerFields', () => {
let wrapper;
diff --git a/spec/frontend/integrations/edit/store/actions_spec.js b/spec/frontend/integrations/edit/store/actions_spec.js
index c3ce6e51a3d..5356c0a411b 100644
--- a/spec/frontend/integrations/edit/store/actions_spec.js
+++ b/spec/frontend/integrations/edit/store/actions_spec.js
@@ -1,9 +1,8 @@
+import testAction from 'helpers/vuex_action_helper';
import createState from '~/integrations/edit/store/state';
import { setOverride } from '~/integrations/edit/store/actions';
import * as types from '~/integrations/edit/store/mutation_types';
-import testAction from 'helpers/vuex_action_helper';
-
describe('Integration form store actions', () => {
let state;
diff --git a/spec/frontend/issuable_form_spec.js b/spec/frontend/issuable_form_spec.js
new file mode 100644
index 00000000000..009ca28ff78
--- /dev/null
+++ b/spec/frontend/issuable_form_spec.js
@@ -0,0 +1,56 @@
+import $ from 'jquery';
+
+import IssuableForm from '~/issuable_form';
+
+function createIssuable() {
+ const instance = new IssuableForm($(document.createElement('form')));
+
+ instance.titleField = $(document.createElement('input'));
+
+ return instance;
+}
+
+describe('IssuableForm', () => {
+ let instance;
+
+ beforeEach(() => {
+ instance = createIssuable();
+ });
+
+ describe('removeWip', () => {
+ it.each`
+ prefix
+ ${'wip '}
+ ${' wIP: '}
+ ${'[WIp] '}
+ ${'wIP:'}
+ ${' [WIp]'}
+ ${'drAft '}
+ ${'draFT: '}
+ ${' [DRaft] '}
+ ${'drAft:'}
+ ${'[draFT]'}
+ ${' dRaFt - '}
+ ${'dRaFt - '}
+ ${'(draft) '}
+ ${' (DrafT)'}
+ ${'wip wip: [wip] draft draft - draft: [draft] (draft)'}
+ `('removes "$prefix" from the beginning of the title', ({ prefix }) => {
+ instance.titleField.val(`${prefix}The Issuable's Title Value`);
+
+ instance.removeWip();
+
+ expect(instance.titleField.val()).toBe("The Issuable's Title Value");
+ });
+ });
+
+ describe('addWip', () => {
+ it("properly adds the work in progress prefix to the Issuable's title", () => {
+ instance.titleField.val("The Issuable's Title Value");
+
+ instance.addWip();
+
+ expect(instance.titleField.val()).toBe("Draft: The Issuable's Title Value");
+ });
+ });
+});
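A single case-insensitive pattern can strip every prefix listed in the removeWip table above. The snippet below is only an illustration of that idea, not the regular expression IssuableForm itself uses:

it('strips any draft/WIP prefix (illustration only)', () => {
  // Hypothetical pattern covering the prefixes exercised in the spec above.
  const DRAFT_OR_WIP_PREFIX = /^\s*(?:[[(]?(?:wip|draft)[\])]?(?::|\s*-)?\s*)+/i;

  expect('[WIp] The title'.replace(DRAFT_OR_WIP_PREFIX, '')).toBe('The title');
  expect(' dRaFt - The title'.replace(DRAFT_OR_WIP_PREFIX, '')).toBe('The title');
});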
diff --git a/spec/frontend/issuable_suggestions/components/item_spec.js b/spec/frontend/issuable_suggestions/components/item_spec.js
index 36799f4ee9f..ad37ccd2ca5 100644
--- a/spec/frontend/issuable_suggestions/components/item_spec.js
+++ b/spec/frontend/issuable_suggestions/components/item_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { GlTooltip, GlLink } from '@gitlab/ui';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import Icon from '~/vue_shared/components/icon.vue';
import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
import Suggestion from '~/issuable_suggestions/components/item.vue';
import mockData from '../mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Issuable suggestions suggestion component', () => {
let vm;
diff --git a/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap b/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
index 3e445319746..c327b7de827 100644
--- a/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
+++ b/spec/frontend/issuables_list/components/__snapshots__/issuables_list_app_spec.js.snap
@@ -2,7 +2,6 @@
exports[`Issuables list component with empty issues response with all state should display a catch-all if there are no issues to show 1`] = `
<gl-empty-state-stub
- description="The Issue Tracker is the place to add things that need to be improved or solved in a project. You can register or sign in to create issues for this project."
svgpath="/emptySvg"
title="There are no issues to show"
/>
diff --git a/spec/frontend/issuables_list/components/issuable_spec.js b/spec/frontend/issuables_list/components/issuable_spec.js
index 87868b7eeff..6ede46a602a 100644
--- a/spec/frontend/issuables_list/components/issuable_spec.js
+++ b/spec/frontend/issuables_list/components/issuable_spec.js
@@ -76,8 +76,9 @@ describe('Issuable component', () => {
});
const checkExists = findFn => () => findFn().exists();
- const hasConfidentialIcon = () =>
- wrapper.findAll(GlIcon).wrappers.some(iconWrapper => iconWrapper.props('name') === 'eye-slash');
+ const hasIcon = (iconName, iconWrapper = wrapper) =>
+ iconWrapper.findAll(GlIcon).wrappers.some(icon => icon.props('name') === iconName);
+ const hasConfidentialIcon = () => hasIcon('eye-slash');
const findTaskStatus = () => wrapper.find('.task-status');
const findOpenedAgoContainer = () => wrapper.find('[data-testid="openedByMessage"]');
const findAuthor = () => wrapper.find({ ref: 'openedAgoByContainer' });
@@ -85,18 +86,20 @@ describe('Issuable component', () => {
const findMilestoneTooltip = () => findMilestone().attributes('title');
const findDueDate = () => wrapper.find('.js-due-date');
const findLabels = () => wrapper.findAll(GlLabel);
- const findWeight = () => wrapper.find('.js-weight');
+ const findWeight = () => wrapper.find('[data-testid="weight"]');
const findAssignees = () => wrapper.find(IssueAssignees);
- const findMergeRequestsCount = () => wrapper.find('.js-merge-requests');
- const findUpvotes = () => wrapper.find('.js-upvotes');
- const findDownvotes = () => wrapper.find('.js-downvotes');
- const findNotes = () => wrapper.find('.js-notes');
+ const findBlockingIssuesCount = () => wrapper.find('[data-testid="blocking-issues"]');
+ const findMergeRequestsCount = () => wrapper.find('[data-testid="merge-requests"]');
+ const findUpvotes = () => wrapper.find('[data-testid="upvotes"]');
+ const findDownvotes = () => wrapper.find('[data-testid="downvotes"]');
+ const findNotes = () => wrapper.find('[data-testid="notes-count"]');
const findBulkCheckbox = () => wrapper.find('input.selected-issuable');
const findScopedLabels = () => findLabels().filter(w => isScopedLabel({ title: w.text() }));
const findUnscopedLabels = () => findLabels().filter(w => !isScopedLabel({ title: w.text() }));
const findIssuableTitle = () => wrapper.find('[data-testid="issuable-title"]');
const findIssuableStatus = () => wrapper.find('[data-testid="issuable-status"]');
const containsJiraLogo = () => wrapper.contains('[data-testid="jira-logo"]');
+ const findHealthStatus = () => wrapper.find('.health-status');
describe('when mounted', () => {
it('initializes user popovers', () => {
@@ -181,6 +184,7 @@ describe('Issuable component', () => {
${'due date'} | ${checkExists(findDueDate)}
${'labels'} | ${checkExists(findLabels)}
${'weight'} | ${checkExists(findWeight)}
+ ${'blocking issues count'} | ${checkExists(findBlockingIssuesCount)}
${'merge request count'} | ${checkExists(findMergeRequestsCount)}
${'upvotes'} | ${checkExists(findUpvotes)}
${'downvotes'} | ${checkExists(findDownvotes)}
@@ -286,11 +290,7 @@ describe('Issuable component', () => {
it('renders milestone', () => {
expect(findMilestone().exists()).toBe(true);
- expect(
- findMilestone()
- .find('.fa-clock-o')
- .exists(),
- ).toBe(true);
+ expect(hasIcon('clock', findMilestone())).toBe(true);
expect(findMilestone().text()).toEqual(TEST_MILESTONE.title);
});
@@ -430,11 +430,12 @@ describe('Issuable component', () => {
});
describe.each`
- desc | key | finder
- ${'with merge requests count'} | ${'merge_requests_count'} | ${findMergeRequestsCount}
- ${'with upvote count'} | ${'upvotes'} | ${findUpvotes}
- ${'with downvote count'} | ${'downvotes'} | ${findDownvotes}
- ${'with notes count'} | ${'user_notes_count'} | ${findNotes}
+ desc | key | finder
+ ${'with blocking issues count'} | ${'blocking_issues_count'} | ${findBlockingIssuesCount}
+ ${'with merge requests count'} | ${'merge_requests_count'} | ${findMergeRequestsCount}
+ ${'with upvote count'} | ${'upvotes'} | ${findUpvotes}
+ ${'with downvote count'} | ${'downvotes'} | ${findDownvotes}
+ ${'with notes count'} | ${'user_notes_count'} | ${findNotes}
`('$desc', ({ key, finder }) => {
beforeEach(() => {
issuable[key] = TEST_META_COUNT;
@@ -442,7 +443,7 @@ describe('Issuable component', () => {
factory({ issuable });
});
- it('renders merge requests count', () => {
+ it('renders correct count', () => {
expect(finder().exists()).toBe(true);
expect(finder().text()).toBe(TEST_META_COUNT.toString());
expect(finder().classes('no-comments')).toBe(false);
@@ -474,4 +475,19 @@ describe('Issuable component', () => {
});
});
});
+
+ if (IS_EE) {
+ describe('with health status', () => {
+ it('renders health status tag', () => {
+ factory({ issuable });
+ expect(findHealthStatus().exists()).toBe(true);
+ });
+
+ it('does not render when health status is absent', () => {
+ issuable.health_status = null;
+ factory({ issuable });
+ expect(findHealthStatus().exists()).toBe(false);
+ });
+ });
+ }
});
diff --git a/spec/frontend/issuables_list/components/issuables_list_app_spec.js b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
index 9f4995a54ee..65b87ddf6a6 100644
--- a/spec/frontend/issuables_list/components/issuables_list_app_spec.js
+++ b/spec/frontend/issuables_list/components/issuables_list_app_spec.js
@@ -4,14 +4,14 @@ import { shallowMount } from '@vue/test-utils';
import { GlEmptyState, GlPagination, GlSkeletonLoading } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'helpers/test_constants';
-import flash from '~/flash';
+import { deprecatedCreateFlash as flash } from '~/flash';
import IssuablesListApp from '~/issuables_list/components/issuables_list_app.vue';
import Issuable from '~/issuables_list/components/issuable.vue';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
import issueablesEventBus from '~/issuables_list/eventhub';
import { PAGE_SIZE, PAGE_SIZE_MANUAL, RELATIVE_POSITION } from '~/issuables_list/constants';
-jest.mock('~/flash', () => jest.fn());
+jest.mock('~/flash');
jest.mock('~/issuables_list/eventhub');
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
@@ -21,7 +21,7 @@ jest.mock('~/lib/utils/common_utils', () => ({
const TEST_LOCATION = `${TEST_HOST}/issues`;
const TEST_ENDPOINT = '/issues';
const TEST_CREATE_ISSUES_PATH = '/createIssue';
-const TEST_EMPTY_SVG_PATH = '/emptySvg';
+const TEST_SVG_PATH = '/emptySvg';
const setUrl = query => {
window.location.href = `${TEST_LOCATION}${query}`;
@@ -48,11 +48,15 @@ describe('Issuables list component', () => {
};
const factory = (props = { sortKey: 'priority' }) => {
+ const emptyStateMeta = {
+ createIssuePath: TEST_CREATE_ISSUES_PATH,
+ svgPath: TEST_SVG_PATH,
+ };
+
wrapper = shallowMount(IssuablesListApp, {
propsData: {
endpoint: TEST_ENDPOINT,
- createIssuePath: TEST_CREATE_ISSUES_PATH,
- emptySvgPath: TEST_EMPTY_SVG_PATH,
+ emptyStateMeta,
...props,
},
});
@@ -117,9 +121,10 @@ describe('Issuables list component', () => {
expect(wrapper.vm).toMatchObject({
// Props
canBulkEdit: false,
- createIssuePath: TEST_CREATE_ISSUES_PATH,
- emptySvgPath: TEST_EMPTY_SVG_PATH,
-
+ emptyStateMeta: {
+ createIssuePath: TEST_CREATE_ISSUES_PATH,
+ svgPath: TEST_SVG_PATH,
+ },
// Data
filters: {
state: 'opened',
diff --git a/spec/frontend/issuables_list/issuable_list_test_data.js b/spec/frontend/issuables_list/issuable_list_test_data.js
index 19d8ee7f71a..313aa15bd31 100644
--- a/spec/frontend/issuables_list/issuable_list_test_data.js
+++ b/spec/frontend/issuables_list/issuable_list_test_data.js
@@ -18,6 +18,7 @@ export const simpleIssue = {
},
assignee: null,
user_notes_count: 0,
+ blocking_issues_count: 0,
merge_requests_count: 0,
upvotes: 0,
downvotes: 0,
@@ -29,6 +30,7 @@ export const simpleIssue = {
references: {
relative: 'html-boilerplate#45',
},
+ health_status: 'on_track',
};
export const testLabels = [
diff --git a/spec/frontend/issue_show/components/app_spec.js b/spec/frontend/issue_show/components/app_spec.js
index d970fd349e7..f76f42cb9ae 100644
--- a/spec/frontend/issue_show/components/app_spec.js
+++ b/spec/frontend/issue_show/components/app_spec.js
@@ -2,6 +2,7 @@ import { GlIntersectionObserver } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
+import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
import axios from '~/lib/utils/axios_utils';
import { visitUrl } from '~/lib/utils/url_utility';
import '~/behaviors/markdown/render_gfm';
@@ -22,6 +23,8 @@ const zoomMeetingUrl = 'https://gitlab.zoom.us/j/95919234811';
const publishedIncidentUrl = 'https://status.com/';
describe('Issuable output', () => {
+ useMockIntersectionObserver();
+
let mock;
let realtimeRequestCount = 0;
let wrapper;
@@ -45,11 +48,6 @@ describe('Issuable output', () => {
</div>
`);
- window.IntersectionObserver = class {
- disconnect = jest.fn();
- observe = jest.fn();
- };
-
mock = new MockAdapter(axios);
mock
.onGet('/gitlab-org/gitlab-shell/-/issues/9/realtime_changes/realtime_changes')
@@ -84,7 +82,6 @@ describe('Issuable output', () => {
});
afterEach(() => {
- delete window.IntersectionObserver;
mock.restore();
realtimeRequestCount = 0;
diff --git a/spec/frontend/issue_show/components/issuable_header_warnings_spec.js b/spec/frontend/issue_show/components/issuable_header_warnings_spec.js
deleted file mode 100644
index 5a166812d84..00000000000
--- a/spec/frontend/issue_show/components/issuable_header_warnings_spec.js
+++ /dev/null
@@ -1,79 +0,0 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import IssuableHeaderWarnings from '~/issue_show/components/issuable_header_warnings.vue';
-import createStore from '~/notes/stores';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('IssuableHeaderWarnings', () => {
- let wrapper;
- let store;
-
- const findConfidential = () => wrapper.find('[data-testid="confidential"]');
- const findLocked = () => wrapper.find('[data-testid="locked"]');
- const confidentialIconName = () => findConfidential().attributes('name');
- const lockedIconName = () => findLocked().attributes('name');
-
- const createComponent = () => {
- wrapper = shallowMount(IssuableHeaderWarnings, { store, localVue });
- };
-
- beforeEach(() => {
- store = createStore();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- store = null;
- });
-
- describe('when confidential is on', () => {
- beforeEach(() => {
- store.state.noteableData.confidential = true;
-
- createComponent();
- });
-
- it('renders the confidential icon', () => {
- expect(confidentialIconName()).toBe('eye-slash');
- });
- });
-
- describe('when confidential is off', () => {
- beforeEach(() => {
- store.state.noteableData.confidential = false;
-
- createComponent();
- });
-
- it('does not find the component', () => {
- expect(findConfidential().exists()).toBe(false);
- });
- });
-
- describe('when discussion locked is on', () => {
- beforeEach(() => {
- store.state.noteableData.discussion_locked = true;
-
- createComponent();
- });
-
- it('renders the locked icon', () => {
- expect(lockedIconName()).toBe('lock');
- });
- });
-
- describe('when discussion locked is off', () => {
- beforeEach(() => {
- store.state.noteableData.discussion_locked = false;
-
- createComponent();
- });
-
- it('does not find the component', () => {
- expect(findLocked().exists()).toBe(false);
- });
- });
-});
diff --git a/spec/frontend/jira_import/components/jira_import_app_spec.js b/spec/frontend/jira_import/components/jira_import_app_spec.js
index 64b4461d7b2..27314a0eb6e 100644
--- a/spec/frontend/jira_import/components/jira_import_app_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_app_spec.js
@@ -1,21 +1,26 @@
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
+import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
-import axios from '~/lib/utils/axios_utils';
import JiraImportApp from '~/jira_import/components/jira_import_app.vue';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
import JiraImportProgress from '~/jira_import/components/jira_import_progress.vue';
import JiraImportSetup from '~/jira_import/components/jira_import_setup.vue';
-import initiateJiraImportMutation from '~/jira_import/queries/initiate_jira_import.mutation.graphql';
-import getJiraUserMappingMutation from '~/jira_import/queries/get_jira_user_mapping.mutation.graphql';
-import { imports, issuesPath, jiraIntegrationPath, jiraProjects, userMappings } from '../mock_data';
+import {
+ imports,
+ issuesPath,
+ jiraIntegrationPath,
+ jiraProjects,
+ projectId,
+ projectPath,
+} from '../mock_data';
describe('JiraImportApp', () => {
- let axiosMock;
- let mutateSpy;
let wrapper;
+ const inProgressIllustration = 'in-progress-illustration.svg';
+
+ const setupIllustration = 'setup-illustration.svg';
+
const getFormComponent = () => wrapper.find(JiraImportForm);
const getProgressComponent = () => wrapper.find(JiraImportProgress);
@@ -29,28 +34,22 @@ describe('JiraImportApp', () => {
const mountComponent = ({
isJiraConfigured = true,
errorMessage = '',
- selectedProject = 'MTG',
showAlert = false,
isInProgress = false,
loading = false,
- mutate = mutateSpy,
- mountFunction = shallowMount,
} = {}) =>
- mountFunction(JiraImportApp, {
+ shallowMount(JiraImportApp, {
propsData: {
- inProgressIllustration: 'in-progress-illustration.svg',
+ inProgressIllustration,
isJiraConfigured,
issuesPath,
jiraIntegrationPath,
- projectId: '5',
- projectPath: 'gitlab-org/gitlab-test',
- setupIllustration: 'setup-illustration.svg',
+ projectId,
+ projectPath,
+ setupIllustration,
},
data() {
return {
- isSubmitting: false,
- selectedProject,
- userMappings,
errorMessage,
showAlert,
jiraImportDetails: {
@@ -64,26 +63,11 @@ describe('JiraImportApp', () => {
mocks: {
$apollo: {
loading,
- mutate,
},
},
});
- beforeEach(() => {
- axiosMock = new AxiosMockAdapter(axios);
- mutateSpy = jest.fn(() =>
- Promise.resolve({
- data: {
- jiraImportStart: { errors: [] },
- jiraImportUsers: { jiraUsers: [], errors: [] },
- },
- }),
- );
- });
-
afterEach(() => {
- axiosMock.restore();
- mutateSpy.mockRestore();
wrapper.destroy();
wrapper = null;
});
@@ -176,111 +160,84 @@ describe('JiraImportApp', () => {
});
});
- describe('import in progress screen', () => {
+ describe('import setup component', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ isJiraConfigured: false });
+ });
+
+ it('receives the illustration', () => {
+ expect(getSetupComponent().props('illustration')).toBe(setupIllustration);
+ });
+
+ it('receives the path to the Jira integration page', () => {
+ expect(getSetupComponent().props('jiraIntegrationPath')).toBe(jiraIntegrationPath);
+ });
+ });
+
+ describe('import in progress component', () => {
beforeEach(() => {
wrapper = mountComponent({ isInProgress: true });
});
- it('shows the illustration', () => {
- expect(getProgressComponent().props('illustration')).toBe('in-progress-illustration.svg');
+ it('receives the illustration', () => {
+ expect(getProgressComponent().props('illustration')).toBe(inProgressIllustration);
});
- it('shows the name of the most recent import initiator', () => {
+ it('receives the name of the most recent import initiator', () => {
expect(getProgressComponent().props('importInitiator')).toBe('Jane Doe');
});
- it('shows the name of the most recent imported project', () => {
+ it('receives the name of the most recent imported project', () => {
expect(getProgressComponent().props('importProject')).toBe('MTG');
});
- it('shows the time of the most recent import', () => {
+ it('receives the time of the most recent import', () => {
expect(getProgressComponent().props('importTime')).toBe('2020-04-09T16:17:18+00:00');
});
- it('has the path to the issues page', () => {
+ it('receives the path to the issues page', () => {
expect(getProgressComponent().props('issuesPath')).toBe('gitlab-org/gitlab-test/-/issues');
});
});
- describe('jira import form screen', () => {
- describe('when selected project has been imported before', () => {
- it('shows jira-import::MTG-3 label since project MTG has been imported 2 time before', () => {
- wrapper = mountComponent();
-
- expect(getFormComponent().props('importLabel')).toBe('jira-import::MTG-3');
- });
-
- it('shows warning alert to explain project MTG has been imported 2 times before', () => {
- wrapper = mountComponent({ mountFunction: mount });
-
- expect(getAlert().text()).toBe(
- 'You have imported from this project 2 times before. Each new import will create duplicate issues.',
- );
- });
+ describe('import form component', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
});
- describe('when selected project has not been imported before', () => {
- beforeEach(() => {
- wrapper = mountComponent({ selectedProject: 'MJP' });
- });
-
- it('shows jira-import::MJP-1 label since project MJP has not been imported before', () => {
- expect(getFormComponent().props('importLabel')).toBe('jira-import::MJP-1');
- });
-
- it('does not show warning alert since project MJP has not been imported before', () => {
- expect(getAlert().exists()).toBe(false);
- });
+ it('receives the issues path', () => {
+ expect(getFormComponent().props('issuesPath')).toBe(issuesPath);
});
- });
- describe('initiating a Jira import', () => {
- it('calls the mutation with the expected arguments', () => {
- wrapper = mountComponent();
+ it('receives the list of Jira imports', () => {
+ expect(getFormComponent().props('jiraImports')).toEqual(imports);
+ });
- const mutationArguments = {
- mutation: initiateJiraImportMutation,
- variables: {
- input: {
- jiraProjectKey: 'MTG',
- projectPath: 'gitlab-org/gitlab-test',
- usersMapping: [
- {
- jiraAccountId: 'aei23f98f-q23fj98qfj',
- gitlabId: 15,
- },
- {
- jiraAccountId: 'fu39y8t34w-rq3u289t3h4i',
- gitlabId: undefined,
- },
- ],
- },
- },
- };
+ it('receives the list of Jira projects', () => {
+ expect(getFormComponent().props('jiraProjects')).toEqual(jiraProjects);
+ });
- getFormComponent().vm.$emit('initiateJiraImport', 'MTG');
+ it('receives the project ID', () => {
+ expect(getFormComponent().props('projectId')).toBe(projectId);
+ });
- expect(mutateSpy).toHaveBeenCalledWith(expect.objectContaining(mutationArguments));
+ it('receives the project path', () => {
+ expect(getFormComponent().props('projectPath')).toBe(projectPath);
});
- it('shows alert message with error message on error', () => {
- const mutate = jest.fn(() => Promise.reject());
+ it('shows an alert when it emits an error', async () => {
+ expect(getAlert().exists()).toBe(false);
- wrapper = mountComponent({ mutate });
+ getFormComponent().vm.$emit('error', 'There was an error');
- getFormComponent().vm.$emit('initiateJiraImport', 'MTG');
+ await Vue.nextTick();
- // One tick doesn't update the dom to the desired state so we have two ticks here
- return Vue.nextTick()
- .then(Vue.nextTick)
- .then(() => {
- expect(getAlert().text()).toBe('There was an error importing the Jira project.');
- });
+ expect(getAlert().exists()).toBe(true);
});
});
describe('alert', () => {
- it('can be dismissed', () => {
+ it('can be dismissed', async () => {
wrapper = mountComponent({
errorMessage: 'There was an error importing the Jira project.',
showAlert: true,
@@ -291,40 +248,9 @@ describe('JiraImportApp', () => {
getAlert().vm.$emit('dismiss');
- return Vue.nextTick().then(() => {
- expect(getAlert().exists()).toBe(false);
- });
- });
- });
-
- describe('on mount', () => {
- it('makes a GraphQL mutation call to get user mappings', () => {
- wrapper = mountComponent();
+ await Vue.nextTick();
- const mutationArguments = {
- mutation: getJiraUserMappingMutation,
- variables: {
- input: {
- projectPath: 'gitlab-org/gitlab-test',
- },
- },
- };
-
- expect(mutateSpy).toHaveBeenCalledWith(expect.objectContaining(mutationArguments));
- });
-
- it('does not make a GraphQL mutation call to get user mappings when Jira is not configured', () => {
- wrapper = mountComponent({ isJiraConfigured: false });
-
- expect(mutateSpy).not.toHaveBeenCalled();
- });
-
- it('shows error message when there is an error with the GraphQL mutation call', () => {
- const mutate = jest.fn(() => Promise.reject());
-
- wrapper = mountComponent({ mutate });
-
- expect(getAlert().exists()).toBe(true);
+ expect(getAlert().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index 685b0288e92..7cc7b40f4c8 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -1,56 +1,97 @@
-import { GlButton, GlFormSelect, GlLabel, GlTable } from '@gitlab/ui';
+import { GlAlert, GlButton, GlNewDropdown, GlFormSelect, GlLabel, GlTable } from '@gitlab/ui';
import { getByRole } from '@testing-library/dom';
import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
-import { issuesPath, jiraProjects, userMappings } from '../mock_data';
+import getJiraUserMappingMutation from '~/jira_import/queries/get_jira_user_mapping.mutation.graphql';
+import initiateJiraImportMutation from '~/jira_import/queries/initiate_jira_import.mutation.graphql';
+import {
+ imports,
+ issuesPath,
+ jiraProjects,
+ projectId,
+ projectPath,
+ userMappings as defaultUserMappings,
+} from '../mock_data';
describe('JiraImportForm', () => {
let axiosMock;
+ let mutateSpy;
let wrapper;
const currentUsername = 'mrgitlab';
- const importLabel = 'jira-import::MTG-1';
- const value = 'MTG';
+
+ const getAlert = () => wrapper.find(GlAlert);
const getSelectDropdown = () => wrapper.find(GlFormSelect);
+ const getContinueButton = () => wrapper.find(GlButton);
+
const getCancelButton = () => wrapper.findAll(GlButton).at(1);
+ const getLabel = () => wrapper.find(GlLabel);
+
+ const getTable = () => wrapper.find(GlTable);
+
+ const getUserDropdown = () => getTable().find(GlNewDropdown);
+
const getHeader = name => getByRole(wrapper.element, 'columnheader', { name });
- const mountComponent = ({ isSubmitting = false, mountFunction = shallowMount } = {}) =>
+ const mountComponent = ({
+ isSubmitting = false,
+ loading = false,
+ mutate = mutateSpy,
+ selectedProject = 'MTG',
+ userMappings = defaultUserMappings,
+ mountFunction = shallowMount,
+ } = {}) =>
mountFunction(JiraImportForm, {
propsData: {
- importLabel,
- isSubmitting,
issuesPath,
+ jiraImports: imports,
jiraProjects,
- projectId: '5',
- userMappings,
- value,
+ projectId,
+ projectPath,
},
data: () => ({
isFetching: false,
+ isSubmitting,
searchTerm: '',
+ selectedProject,
selectState: null,
users: [],
+ userMappings,
}),
+ mocks: {
+ $apollo: {
+ loading,
+ mutate,
+ },
+ },
currentUsername,
});
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
+ mutateSpy = jest.fn(() =>
+ Promise.resolve({
+ data: {
+ jiraImportStart: { errors: [] },
+ jiraImportUsers: { jiraUsers: [], errors: [] },
+ },
+ }),
+ );
});
afterEach(() => {
axiosMock.restore();
+ mutateSpy.mockRestore();
wrapper.destroy();
wrapper = null;
});
- describe('select dropdown', () => {
+ describe('select dropdown project selection', () => {
it('is shown', () => {
wrapper = mountComponent();
@@ -67,12 +108,34 @@ describe('JiraImportForm', () => {
});
});
- it('emits an "input" event when the input select value changes', () => {
- wrapper = mountComponent();
+ describe('when selected project has been imported before', () => {
+ it('shows jira-import::MTG-3 label since project MTG has been imported 2 times before', () => {
+ wrapper = mountComponent();
+
+ expect(getLabel().props('title')).toBe('jira-import::MTG-3');
+ });
+
+ it('shows warning alert to explain project MTG has been imported 2 times before', () => {
+ wrapper = mountComponent({ mountFunction: mount });
+
+ expect(getAlert().text()).toBe(
+ 'You have imported from this project 2 times before. Each new import will create duplicate issues.',
+ );
+ });
+ });
+
+ describe('when selected project has not been imported before', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ selectedProject: 'MJP' });
+ });
- getSelectDropdown().vm.$emit('change', value);
+ it('shows jira-import::MJP-1 label since project MJP has not been imported before', () => {
+ expect(getLabel().props('title')).toBe('jira-import::MJP-1');
+ });
- expect(wrapper.emitted('input')[0]).toEqual([value]);
+ it('does not show warning alert since project MJP has not been imported before', () => {
+ expect(getAlert().exists()).toBe(false);
+ });
});
});
@@ -81,10 +144,6 @@ describe('JiraImportForm', () => {
wrapper = mountComponent();
});
- it('shows a label which will be applied to imported Jira projects', () => {
- expect(wrapper.find(GlLabel).props('title')).toBe(importLabel);
- });
-
it('shows a heading for the user mapping section', () => {
expect(
getByRole(wrapper.element, 'heading', { name: 'Jira-GitLab user mapping template' }),
@@ -93,7 +152,7 @@ describe('JiraImportForm', () => {
it('shows information to the user', () => {
expect(wrapper.find('p').text()).toBe(
- 'Jira users have been matched with similar GitLab users. This can be overwritten by selecting a GitLab user from the dropdown in the "GitLab username" column. If it wasn\'t possible to match a Jira user with a GitLab user, the dropdown defaults to the user conducting the import.',
+ 'Jira users have been imported from the configured Jira instance. They can be mapped by selecting a GitLab user from the dropdown in the "GitLab username" column. When the form appears, the dropdown defaults to the user conducting the import.',
);
});
});
@@ -121,13 +180,53 @@ describe('JiraImportForm', () => {
it('shows all user mappings', () => {
wrapper = mountComponent({ mountFunction: mount });
- expect(wrapper.find(GlTable).findAll('tbody tr').length).toBe(userMappings.length);
+ expect(getTable().findAll('tbody tr')).toHaveLength(2);
});
it('shows correct information in each cell', () => {
wrapper = mountComponent({ mountFunction: mount });
- expect(wrapper.find(GlTable).element).toMatchSnapshot();
+ expect(getTable().element).toMatchSnapshot();
+ });
+
+ describe('when there is no Jira->GitLab user mapping', () => {
+ it('shows the logged in user in the dropdown', () => {
+ wrapper = mountComponent({
+ mountFunction: mount,
+ userMappings: [
+ {
+ jiraAccountId: 'aei23f98f-q23fj98qfj',
+ jiraDisplayName: 'Jane Doe',
+ jiraEmail: 'janedoe@example.com',
+ gitlabId: undefined,
+ gitlabUsername: undefined,
+ },
+ ],
+ });
+
+ expect(getUserDropdown().text()).toContain(currentUsername);
+ });
+ });
+
+ describe('when there is a Jira->GitLab user mapping', () => {
+ it('shows the mapped user in the dropdown', () => {
+ const gitlabUsername = 'mai';
+
+ wrapper = mountComponent({
+ mountFunction: mount,
+ userMappings: [
+ {
+ jiraAccountId: 'aei23f98f-q23fj98qfj',
+ jiraDisplayName: 'Jane Doe',
+ jiraEmail: 'janedoe@example.com',
+ gitlabId: 14,
+ gitlabUsername,
+ },
+ ],
+ });
+
+ expect(getUserDropdown().text()).toContain(gitlabUsername);
+ });
});
});
});
@@ -137,13 +236,13 @@ describe('JiraImportForm', () => {
it('is shown', () => {
wrapper = mountComponent();
- expect(wrapper.find(GlButton).text()).toBe('Continue');
+ expect(getContinueButton().text()).toBe('Continue');
});
it('is in loading state when the form is submitting', async () => {
wrapper = mountComponent({ isSubmitting: true });
- expect(wrapper.find(GlButton).props('loading')).toBe(true);
+ expect(getContinueButton().props('loading')).toBe(true);
});
});
@@ -162,13 +261,61 @@ describe('JiraImportForm', () => {
});
});
- describe('form', () => {
- it('emits an "initiateJiraImport" event with the selected dropdown value when submitted', () => {
+ describe('submitting the form', () => {
+ it('initiates the Jira import mutation with the expected arguments', () => {
wrapper = mountComponent();
+ const mutationArguments = {
+ mutation: initiateJiraImportMutation,
+ variables: {
+ input: {
+ jiraProjectKey: 'MTG',
+ projectPath,
+ usersMapping: [
+ {
+ jiraAccountId: 'aei23f98f-q23fj98qfj',
+ gitlabId: 15,
+ },
+ {
+ jiraAccountId: 'fu39y8t34w-rq3u289t3h4i',
+ gitlabId: undefined,
+ },
+ ],
+ },
+ },
+ };
+
wrapper.find('form').trigger('submit');
- expect(wrapper.emitted('initiateJiraImport')[0]).toEqual([value]);
+ expect(mutateSpy).toHaveBeenCalledWith(expect.objectContaining(mutationArguments));
+ });
+ });
+
+ describe('GraphQL user mapping mutation on mount', () => {
+ it('is called with the expected arguments', () => {
+ wrapper = mountComponent();
+
+ const mutationArguments = {
+ mutation: getJiraUserMappingMutation,
+ variables: {
+ input: {
+ projectPath,
+ },
+ },
+ };
+
+ expect(mutateSpy).toHaveBeenCalledWith(expect.objectContaining(mutationArguments));
+ });
+
+ describe('when the mutation call results in an error', () => {
+ beforeEach(() => {
+ const mutate = jest.fn(() => Promise.reject());
+ wrapper = mountComponent({ mutate });
+ });
+
+ it('shows error message', () => {
+ expect(getAlert().exists()).toBe(true);
+ });
});
});
});
diff --git a/spec/frontend/jira_import/mock_data.js b/spec/frontend/jira_import/mock_data.js
index a7447221b15..8ea40080f32 100644
--- a/spec/frontend/jira_import/mock_data.js
+++ b/spec/frontend/jira_import/mock_data.js
@@ -3,6 +3,16 @@ import { IMPORT_STATE } from '~/jira_import/utils/jira_import_utils';
export const fullPath = 'gitlab-org/gitlab-test';
+export const issuesPath = 'gitlab-org/gitlab-test/-/issues';
+
+export const illustration = 'illustration.svg';
+
+export const jiraIntegrationPath = 'gitlab-org/gitlab-test/-/services/jira/edit';
+
+export const projectId = '5';
+
+export const projectPath = 'gitlab-org/gitlab-test';
+
export const queryDetails = {
query: getJiraImportDetailsQuery,
variables: {
@@ -71,12 +81,6 @@ export const jiraImportMutationResponse = {
},
};
-export const issuesPath = 'gitlab-org/gitlab-test/-/issues';
-
-export const jiraIntegrationPath = 'gitlab-org/gitlab-test/-/services/jira/edit';
-
-export const illustration = 'illustration.svg';
-
export const jiraProjects = [
{ text: 'My Jira Project (MJP)', value: 'MJP' },
{ text: 'My Second Jira Project (MSJP)', value: 'MSJP' },
diff --git a/spec/frontend/jobs/components/empty_state_spec.js b/spec/frontend/jobs/components/empty_state_spec.js
index c6eac4e27b3..29d0c4e07aa 100644
--- a/spec/frontend/jobs/components/empty_state_spec.js
+++ b/spec/frontend/jobs/components/empty_state_spec.js
@@ -1,12 +1,10 @@
-import Vue from 'vue';
-import component from '~/jobs/components/empty_state.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
+import EmptyState from '~/jobs/components/empty_state.vue';
describe('Empty State', () => {
- const Component = Vue.extend(component);
- let vm;
+ let wrapper;
- const props = {
+ const defaultProps = {
illustrationPath: 'illustrations/pending_job_empty.svg',
illustrationSizeClass: 'svg-430',
title: 'This job has not started yet',
@@ -14,100 +12,107 @@ describe('Empty State', () => {
variablesSettingsUrl: '',
};
+ const createWrapper = props => {
+ wrapper = mount(EmptyState, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
const content = 'This job is in pending state and is waiting to be picked by a runner';
+ const findEmptyStateImage = () => wrapper.find('img');
+ const findTitle = () => wrapper.find('[data-testid="job-empty-state-title"]');
+ const findContent = () => wrapper.find('[data-testid="job-empty-state-content"]');
+ const findAction = () => wrapper.find('[data-testid="job-empty-state-action"]');
+ const findManualVarsForm = () => wrapper.find('[data-testid="manual-vars-form"]');
+
afterEach(() => {
- vm.$destroy();
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
describe('renders image and title', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- ...props,
- content,
- });
+ createWrapper();
});
- it('renders img with provided path and size', () => {
- expect(vm.$el.querySelector('img').getAttribute('src')).toEqual(props.illustrationPath);
- expect(vm.$el.querySelector('.svg-content').classList).toContain(props.illustrationSizeClass);
+ it('renders empty state image', () => {
+ expect(findEmptyStateImage().exists()).toBe(true);
});
it('renders provided title', () => {
- expect(vm.$el.querySelector('.js-job-empty-state-title').textContent.trim()).toEqual(
- props.title,
- );
+ expect(
+ findTitle()
+ .text()
+ .trim(),
+ ).toBe(defaultProps.title);
});
});
describe('with content', () => {
- it('renders content', () => {
- vm = mountComponent(Component, {
- ...props,
- content,
- });
+ beforeEach(() => {
+ createWrapper({ content });
+ });
- expect(vm.$el.querySelector('.js-job-empty-state-content').textContent.trim()).toEqual(
- content,
- );
+ it('renders content', () => {
+ expect(
+ findContent()
+ .text()
+ .trim(),
+ ).toBe(content);
});
});
describe('without content', () => {
- it('does not render content', () => {
- vm = mountComponent(Component, {
- ...props,
- });
+ beforeEach(() => {
+ createWrapper();
+ });
- expect(vm.$el.querySelector('.js-job-empty-state-content')).toBeNull();
+ it('does not render content', () => {
+ expect(findContent().exists()).toBe(false);
});
});
describe('with action', () => {
- it('renders action', () => {
- vm = mountComponent(Component, {
- ...props,
- content,
+ beforeEach(() => {
+ createWrapper({
action: {
path: 'runner',
button_title: 'Check runner',
method: 'post',
},
});
+ });
- expect(vm.$el.querySelector('.js-job-empty-state-action').getAttribute('href')).toEqual(
- 'runner',
- );
+ it('renders action', () => {
+ expect(findAction().attributes('href')).toBe('runner');
});
});
describe('without action', () => {
- it('does not render action', () => {
- vm = mountComponent(Component, {
- ...props,
- content,
+ beforeEach(() => {
+ createWrapper({
action: null,
});
+ });
- expect(vm.$el.querySelector('.js-job-empty-state-action')).toBeNull();
+ it('does not render action', () => {
+ expect(findAction().exists()).toBe(false);
});
- });
- describe('without playbale action', () => {
it('does not render manual variables form', () => {
- vm = mountComponent(Component, {
- ...props,
- content,
- });
-
- expect(vm.$el.querySelector('.js-manual-vars-form')).toBeNull();
+ expect(findManualVarsForm().exists()).toBe(false);
});
});
- describe('with playbale action and not scheduled job', () => {
+ describe('with playable action and not scheduled job', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- ...props,
+ createWrapper({
content,
playable: true,
scheduled: false,
@@ -120,22 +125,25 @@ describe('Empty State', () => {
});
it('renders manual variables form', () => {
- expect(vm.$el.querySelector('.js-manual-vars-form')).not.toBeNull();
+ expect(findManualVarsForm().exists()).toBe(true);
});
it('does not render the empty state action', () => {
- expect(vm.$el.querySelector('.js-job-empty-state-action')).toBeNull();
+ expect(findAction().exists()).toBe(false);
});
});
- describe('with playbale action and scheduled job', () => {
- it('does not render manual variables form', () => {
- vm = mountComponent(Component, {
- ...props,
+ describe('with playable action and scheduled job', () => {
+ beforeEach(() => {
+ createWrapper({
+ playable: true,
+ scheduled: true,
content,
});
+ });
- expect(vm.$el.querySelector('.js-manual-vars-form')).toBeNull();
+ it('does not render manual variables form', () => {
+ expect(findManualVarsForm().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/jobs/components/job_app_spec.js b/spec/frontend/jobs/components/job_app_spec.js
index d0b3d4f6247..e9ecafcd4c3 100644
--- a/spec/frontend/jobs/components/job_app_spec.js
+++ b/spec/frontend/jobs/components/job_app_spec.js
@@ -1,12 +1,19 @@
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { getJSONFixture } from 'helpers/fixtures';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import JobApp from '~/jobs/components/job_app.vue';
+import Sidebar from '~/jobs/components/sidebar.vue';
+import StuckBlock from '~/jobs/components/stuck_block.vue';
+import UnmetPrerequisitesBlock from '~/jobs/components/unmet_prerequisites_block.vue';
+import EnvironmentsBlock from '~/jobs/components/environments_block.vue';
+import ErasedBlock from '~/jobs/components/erased_block.vue';
+import EmptyState from '~/jobs/components/empty_state.vue';
import createStore from '~/jobs/store';
import job from '../mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Job App', () => {
const localVue = createLocalVue();
@@ -55,6 +62,26 @@ describe('Job App', () => {
.then(() => wrapper.vm.$nextTick());
};
+ const findLoadingComponent = () => wrapper.find(GlLoadingIcon);
+ const findSidebar = () => wrapper.find(Sidebar);
+ const findJobContent = () => wrapper.find('[data-testid="job-content"]');
+ const findStuckBlockComponent = () => wrapper.find(StuckBlock);
+ const findStuckBlockWithTags = () => wrapper.find('[data-testid="job-stuck-with-tags"]');
+ const findStuckBlockNoActiveRunners = () =>
+ wrapper.find('[data-testid="job-stuck-no-active-runners"]');
+ const findFailedJobComponent = () => wrapper.find(UnmetPrerequisitesBlock);
+ const findEnvironmentsBlockComponent = () => wrapper.find(EnvironmentsBlock);
+ const findErasedBlock = () => wrapper.find(ErasedBlock);
+ const findArchivedJob = () => wrapper.find('[data-testid="archived-job"]');
+ const findEmptyState = () => wrapper.find(EmptyState);
+ const findJobNewIssueLink = () => wrapper.find('[data-testid="job-new-issue"]');
+ const findJobEmptyStateTitle = () => wrapper.find('[data-testid="job-empty-state-title"]');
+ const findJobTraceScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
+ const findJobTraceScrollBottom = () =>
+ wrapper.find('[data-testid="job-controller-scroll-bottom"]');
+ const findJobTraceController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
+ const findJobTraceEraseLink = () => wrapper.find('[data-testid="job-log-erase-link"]');
+
beforeEach(() => {
mock = new MockAdapter(axios);
store = createStore();
@@ -72,9 +99,9 @@ describe('Job App', () => {
});
it('renders loading icon', () => {
- expect(wrapper.find('.js-job-loading').exists()).toBe(true);
- expect(wrapper.find('.js-job-sidebar').exists()).toBe(false);
- expect(wrapper.find('.js-job-content').exists()).toBe(false);
+ expect(findLoadingComponent().exists()).toBe(true);
+ expect(findSidebar().exists()).toBe(false);
+ expect(findJobContent().exists()).toBe(false);
});
});
@@ -115,7 +142,7 @@ describe('Job App', () => {
});
it('should render new issue link', () => {
- expect(wrapper.find('.js-new-issue').attributes('href')).toEqual(job.new_issue_path);
+ expect(findJobNewIssueLink().attributes('href')).toEqual(job.new_issue_path);
});
});
@@ -134,7 +161,7 @@ describe('Job App', () => {
});
describe('stuck block', () => {
- describe('without active runners availabl', () => {
+ describe('without active runners available', () => {
it('renders stuck block when there are no runners', () =>
setupAndMount({
jobData: {
@@ -153,8 +180,8 @@ describe('Job App', () => {
tags: [],
},
}).then(() => {
- expect(wrapper.find('.js-job-stuck').exists()).toBe(true);
- expect(wrapper.find('.js-job-stuck .js-stuck-no-active-runner').exists()).toBe(true);
+ expect(findStuckBlockComponent().exists()).toBe(true);
+ expect(findStuckBlockNoActiveRunners().exists()).toBe(true);
}));
});
@@ -176,8 +203,8 @@ describe('Job App', () => {
},
},
}).then(() => {
- expect(wrapper.find('.js-job-stuck').text()).toContain(job.tags[0]);
- expect(wrapper.find('.js-job-stuck .js-stuck-with-tags').exists()).toBe(true);
+ expect(findStuckBlockComponent().text()).toContain(job.tags[0]);
+ expect(findStuckBlockWithTags().exists()).toBe(true);
}));
});
@@ -199,8 +226,8 @@ describe('Job App', () => {
},
},
}).then(() => {
- expect(wrapper.find('.js-job-stuck').text()).toContain(job.tags[0]);
- expect(wrapper.find('.js-job-stuck .js-stuck-with-tags').exists()).toBe(true);
+ expect(findStuckBlockComponent().text()).toContain(job.tags[0]);
+ expect(findStuckBlockWithTags().exists()).toBe(true);
}));
});
@@ -210,7 +237,7 @@ describe('Job App', () => {
runners: { available: true },
},
}).then(() => {
- expect(wrapper.find('.js-job-stuck').exists()).toBe(false);
+ expect(findStuckBlockComponent().exists()).toBe(false);
}));
});
@@ -239,7 +266,7 @@ describe('Job App', () => {
tags: [],
},
}).then(() => {
- expect(wrapper.find('.js-job-failed').exists()).toBe(true);
+ expect(findFailedJobComponent().exists()).toBe(true);
}));
});
@@ -255,12 +282,12 @@ describe('Job App', () => {
},
},
}).then(() => {
- expect(wrapper.find('.js-job-environment').exists()).toBe(true);
+ expect(findEnvironmentsBlockComponent().exists()).toBe(true);
}));
it('does not render environment block when job has no environment', () =>
setupAndMount().then(() => {
- expect(wrapper.find('.js-job-environment').exists()).toBe(false);
+ expect(findEnvironmentsBlockComponent().exists()).toBe(false);
}));
});
@@ -275,7 +302,7 @@ describe('Job App', () => {
erased_at: '2016-11-07T11:11:16.525Z',
},
}).then(() => {
- expect(wrapper.find('.js-job-erased-block').exists()).toBe(true);
+ expect(findErasedBlock().exists()).toBe(true);
}));
it('does not render erased block when `erased` is false', () =>
@@ -284,7 +311,7 @@ describe('Job App', () => {
erased_at: null,
},
}).then(() => {
- expect(wrapper.find('.js-job-erased-block').exists()).toBe(false);
+ expect(findErasedBlock().exists()).toBe(false);
}));
});
@@ -313,7 +340,7 @@ describe('Job App', () => {
},
},
}).then(() => {
- expect(wrapper.find('.js-job-empty-state').exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(true);
}));
it('does not render empty state when job does not have trace but it is running', () =>
@@ -329,12 +356,12 @@ describe('Job App', () => {
},
},
}).then(() => {
- expect(wrapper.find('.js-job-empty-state').exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(false);
}));
it('does not render empty state when job has trace but it is not running', () =>
setupAndMount({ jobData: { has_trace: true } }).then(() => {
- expect(wrapper.find('.js-job-empty-state').exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(false);
}));
it('displays remaining time for a delayed job', () => {
@@ -345,9 +372,9 @@ describe('Job App', () => {
() => new Date(delayedJobFixture.scheduled_at).getTime() - oneHourInMilliseconds,
);
return setupAndMount({ jobData: delayedJobFixture }).then(() => {
- expect(wrapper.find('.js-job-empty-state').exists()).toBe(true);
+ expect(findEmptyState().exists()).toBe(true);
- const title = wrapper.find('.js-job-empty-state-title').text();
+ const title = findJobEmptyStateTitle().text();
expect(title).toEqual('This is a delayed job to run in 01:00:00');
});
@@ -386,7 +413,7 @@ describe('Job App', () => {
beforeEach(() => setupAndMount({ jobData: { archived: true } }));
it('renders warning about job being archived', () => {
- expect(wrapper.find('.js-archived-job ').exists()).toBe(true);
+ expect(findArchivedJob().exists()).toBe(true);
});
});
@@ -394,7 +421,7 @@ describe('Job App', () => {
beforeEach(() => setupAndMount());
it('does not render warning about job being archived', () => {
- expect(wrapper.find('.js-archived-job ').exists()).toBe(false);
+ expect(findArchivedJob().exists()).toBe(false);
});
});
@@ -413,16 +440,16 @@ describe('Job App', () => {
);
it('should render scroll buttons', () => {
- expect(wrapper.find('.js-scroll-top').exists()).toBe(true);
- expect(wrapper.find('.js-scroll-bottom').exists()).toBe(true);
+ expect(findJobTraceScrollTop().exists()).toBe(true);
+ expect(findJobTraceScrollBottom().exists()).toBe(true);
});
it('should render link to raw output', () => {
- expect(wrapper.find('.js-raw-link-controller').exists()).toBe(true);
+ expect(findJobTraceController().exists()).toBe(true);
});
it('should render link to erase job', () => {
- expect(wrapper.find('.js-erase-link').exists()).toBe(true);
+ expect(findJobTraceEraseLink().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index 04f20811601..233cef05622 100644
--- a/spec/frontend/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -1,16 +1,17 @@
-import Vue from 'vue';
-import component from '~/jobs/components/job_log_controllers.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
+import { mount } from '@vue/test-utils';
+import JobLogControllers from '~/jobs/components/job_log_controllers.vue';
describe('Job log controllers', () => {
- const Component = Vue.extend(component);
- let vm;
+ let wrapper;
afterEach(() => {
- vm.$destroy();
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
- const props = {
+ const defaultProps = {
rawPath: '/raw',
erasePath: '/erase',
size: 511952,
@@ -20,70 +21,80 @@ describe('Job log controllers', () => {
isTraceSizeVisible: true,
};
+ const createWrapper = props => {
+ wrapper = mount(JobLogControllers, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findTruncatedInfo = () => wrapper.find('[data-testid="log-truncated-info"]');
+ const findRawLink = () => wrapper.find('[data-testid="raw-link"]');
+ const findRawLinkController = () => wrapper.find('[data-testid="job-raw-link-controller"]');
+ const findEraseLink = () => wrapper.find('[data-testid="job-log-erase-link"]');
+ const findScrollTop = () => wrapper.find('[data-testid="job-controller-scroll-top"]');
+ const findScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
+
describe('Truncate information', () => {
describe('with isTraceSizeVisible', () => {
beforeEach(() => {
- vm = mountComponent(Component, props);
+ createWrapper();
});
it('renders size information', () => {
- expect(vm.$el.querySelector('.js-truncated-info').textContent).toContain('499.95 KiB');
+ expect(findTruncatedInfo().text()).toMatch('499.95 KiB');
});
it('renders link to raw trace', () => {
- expect(vm.$el.querySelector('.js-raw-link').getAttribute('href')).toEqual('/raw');
+ expect(findRawLink().attributes('href')).toBe(defaultProps.rawPath);
});
});
});
describe('links section', () => {
describe('with raw trace path', () => {
- it('renders raw trace link', () => {
- vm = mountComponent(Component, props);
+ beforeEach(() => {
+ createWrapper();
+ });
- expect(vm.$el.querySelector('.js-raw-link-controller').getAttribute('href')).toEqual(
- '/raw',
- );
+ it('renders raw trace link', () => {
+ expect(findRawLinkController().attributes('href')).toBe(defaultProps.rawPath);
});
});
describe('without raw trace path', () => {
- it('does not render raw trace link', () => {
- vm = mountComponent(Component, {
- erasePath: '/erase',
- size: 511952,
- isScrollTopDisabled: true,
- isScrollBottomDisabled: true,
- isScrollingDown: false,
- isTraceSizeVisible: true,
+ beforeEach(() => {
+ createWrapper({
+ rawPath: null,
});
+ });
- expect(vm.$el.querySelector('.js-raw-link-controller')).toBeNull();
+ it('does not render raw trace link', () => {
+ expect(findRawLinkController().exists()).toBe(false);
});
});
describe('when is erasable', () => {
beforeEach(() => {
- vm = mountComponent(Component, props);
+ createWrapper();
});
it('renders erase job link', () => {
- expect(vm.$el.querySelector('.js-erase-link')).not.toBeNull();
+ expect(findEraseLink().exists()).toBe(true);
});
});
describe('when it is not erasable', () => {
- it('does not render erase button', () => {
- vm = mountComponent(Component, {
- rawPath: '/raw',
- size: 511952,
- isScrollTopDisabled: true,
- isScrollBottomDisabled: true,
- isScrollingDown: false,
- isTraceSizeVisible: true,
+ beforeEach(() => {
+ createWrapper({
+ erasePath: null,
});
+ });
- expect(vm.$el.querySelector('.js-erase-link')).toBeNull();
+ it('does not render erase button', () => {
+ expect(findEraseLink().exists()).toBe(false);
});
});
});
@@ -92,45 +103,39 @@ describe('Job log controllers', () => {
describe('scroll top button', () => {
describe('when user can scroll top', () => {
beforeEach(() => {
- vm = mountComponent(Component, props);
+ createWrapper({
+ isScrollTopDisabled: false,
+ });
});
- it('renders enabled scroll top button', () => {
- expect(vm.$el.querySelector('.js-scroll-top').getAttribute('disabled')).toBeNull();
- });
+ it('emits scrollJobLogTop event on click', async () => {
+ findScrollTop().trigger('click');
- it('emits scrollJobLogTop event on click', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.$el.querySelector('.js-scroll-top').click();
+ await wrapper.vm.$nextTick();
- expect(vm.$emit).toHaveBeenCalledWith('scrollJobLogTop');
+ expect(wrapper.emitted().scrollJobLogTop).toHaveLength(1);
});
});
describe('when user can not scroll top', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- rawPath: '/raw',
- erasePath: '/erase',
- size: 511952,
+ createWrapper({
isScrollTopDisabled: true,
isScrollBottomDisabled: false,
isScrollingDown: false,
- isTraceSizeVisible: true,
});
});
it('renders disabled scroll top button', () => {
- expect(vm.$el.querySelector('.js-scroll-top').getAttribute('disabled')).toEqual(
- 'disabled',
- );
+ expect(findScrollTop().attributes('disabled')).toBe('disabled');
});
- it('does not emit scrollJobLogTop event on click', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.$el.querySelector('.js-scroll-top').click();
+ it('does not emit scrollJobLogTop event on click', async () => {
+ findScrollTop().trigger('click');
- expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogTop');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted().scrollJobLogTop).toBeUndefined();
});
});
});
@@ -138,69 +143,61 @@ describe('Job log controllers', () => {
describe('scroll bottom button', () => {
describe('when user can scroll bottom', () => {
beforeEach(() => {
- vm = mountComponent(Component, props);
+ createWrapper();
});
- it('renders enabled scroll bottom button', () => {
- expect(vm.$el.querySelector('.js-scroll-bottom').getAttribute('disabled')).toBeNull();
- });
+ it('emits scrollJobLogBottom event on click', async () => {
+ findScrollBottom().trigger('click');
- it('emits scrollJobLogBottom event on click', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.$el.querySelector('.js-scroll-bottom').click();
+ await wrapper.vm.$nextTick();
- expect(vm.$emit).toHaveBeenCalledWith('scrollJobLogBottom');
+ expect(wrapper.emitted().scrollJobLogBottom).toHaveLength(1);
});
});
describe('when user can not scroll bottom', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- rawPath: '/raw',
- erasePath: '/erase',
- size: 511952,
+ createWrapper({
isScrollTopDisabled: false,
isScrollBottomDisabled: true,
isScrollingDown: false,
- isTraceSizeVisible: true,
});
});
it('renders disabled scroll bottom button', () => {
- expect(vm.$el.querySelector('.js-scroll-bottom').getAttribute('disabled')).toEqual(
- 'disabled',
- );
+ expect(findScrollBottom().attributes('disabled')).toEqual('disabled');
});
- it('does not emit scrollJobLogBottom event on click', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
- vm.$el.querySelector('.js-scroll-bottom').click();
+ it('does not emit scrollJobLogBottom event on click', async () => {
+ findScrollBottom().trigger('click');
- expect(vm.$emit).not.toHaveBeenCalledWith('scrollJobLogBottom');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.emitted().scrollJobLogBottom).toBeUndefined();
});
});
describe('while isScrollingDown is true', () => {
- it('renders animate class for the scroll down button', () => {
- vm = mountComponent(Component, props);
+ beforeEach(() => {
+ createWrapper();
+ });
- expect(vm.$el.querySelector('.js-scroll-bottom').className).toContain('animate');
+ it('renders animate class for the scroll down button', () => {
+ expect(findScrollBottom().classes()).toContain('animate');
});
});
describe('while isScrollingDown is false', () => {
- it('does not render animate class for the scroll down button', () => {
- vm = mountComponent(Component, {
- rawPath: '/raw',
- erasePath: '/erase',
- size: 511952,
+ beforeEach(() => {
+ createWrapper({
isScrollTopDisabled: true,
isScrollBottomDisabled: false,
isScrollingDown: false,
- isTraceSizeVisible: true,
});
+ });
- expect(vm.$el.querySelector('.js-scroll-bottom').className).not.toContain('animate');
+ it('does not render animate class for the scroll down button', () => {
+ expect(findScrollBottom().classes()).not.toContain('animate');
});
});
});
diff --git a/spec/frontend/jobs/components/log/mock_data.js b/spec/frontend/jobs/components/log/mock_data.js
index a6a767f7921..eb8c4fe8bc9 100644
--- a/spec/frontend/jobs/components/log/mock_data.js
+++ b/spec/frontend/jobs/components/log/mock_data.js
@@ -34,7 +34,7 @@ export const utilsMockData = [
content: [
{
text:
- 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.6-golang-1.14-git-2.27-lfs-2.9-chrome-83-node-12.x-yarn-1.21-postgresql-11-graphicsmagick-1.3.34',
+ 'Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.6.6-golang-1.14-git-2.28-lfs-2.9-chrome-84-node-12.x-yarn-1.21-postgresql-11-graphicsmagick-1.3.34',
},
],
section: 'prepare-executor',
diff --git a/spec/frontend/jobs/components/sidebar_spec.js b/spec/frontend/jobs/components/sidebar_spec.js
index 0c8e2dc3aef..48788df0c93 100644
--- a/spec/frontend/jobs/components/sidebar_spec.js
+++ b/spec/frontend/jobs/components/sidebar_spec.js
@@ -59,11 +59,13 @@ describe('Sidebar details block', () => {
describe('actions', () => {
it('should render link to new issue', () => {
- expect(vm.$el.querySelector('.js-new-issue').getAttribute('href')).toEqual(
+ expect(vm.$el.querySelector('[data-testid="job-new-issue"]').getAttribute('href')).toEqual(
job.new_issue_path,
);
- expect(vm.$el.querySelector('.js-new-issue').textContent.trim()).toEqual('New issue');
+ expect(vm.$el.querySelector('[data-testid="job-new-issue"]').textContent.trim()).toEqual(
+ 'New issue',
+ );
});
it('should render link to retry job', () => {
diff --git a/spec/frontend/jobs/components/stuck_block_spec.js b/spec/frontend/jobs/components/stuck_block_spec.js
index c320793b2be..926286bf75a 100644
--- a/spec/frontend/jobs/components/stuck_block_spec.js
+++ b/spec/frontend/jobs/components/stuck_block_spec.js
@@ -1,31 +1,50 @@
-import Vue from 'vue';
-import component from '~/jobs/components/stuck_block.vue';
-import mountComponent from '../../helpers/vue_mount_component_helper';
+import { GlBadge, GlLink } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import StuckBlock from '~/jobs/components/stuck_block.vue';
describe('Stuck Block Job component', () => {
- const Component = Vue.extend(component);
- let vm;
+ let wrapper;
afterEach(() => {
- vm.$destroy();
+ if (wrapper?.destroy) {
+ wrapper.destroy();
+ wrapper = null;
+ }
});
+ const createWrapper = props => {
+ wrapper = shallowMount(StuckBlock, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const tags = ['docker', 'gitlab-org'];
+
+ const findStuckNoActiveRunners = () =>
+ wrapper.find('[data-testid="job-stuck-no-active-runners"]');
+ const findStuckNoRunners = () => wrapper.find('[data-testid="job-stuck-no-runners"]');
+ const findStuckWithTags = () => wrapper.find('[data-testid="job-stuck-with-tags"]');
+ const findRunnerPathLink = () => wrapper.find(GlLink);
+ const findAllBadges = () => wrapper.findAll(GlBadge);
+
describe('with no runners for project', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createWrapper({
hasNoRunnersForProject: true,
runnersPath: '/root/project/runners#js-runners-settings',
});
});
it('renders only information about project not having runners', () => {
- expect(vm.$el.querySelector('.js-stuck-no-runners')).not.toBeNull();
- expect(vm.$el.querySelector('.js-stuck-with-tags')).toBeNull();
- expect(vm.$el.querySelector('.js-stuck-no-active-runner')).toBeNull();
+ expect(findStuckNoRunners().exists()).toBe(true);
+ expect(findStuckWithTags().exists()).toBe(false);
+ expect(findStuckNoActiveRunners().exists()).toBe(false);
});
it('renders link to runners page', () => {
- expect(vm.$el.querySelector('.js-runners-path').getAttribute('href')).toEqual(
+ expect(findRunnerPathLink().attributes('href')).toBe(
'/root/project/runners#js-runners-settings',
);
});
@@ -33,26 +52,27 @@ describe('Stuck Block Job component', () => {
describe('with tags', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createWrapper({
hasNoRunnersForProject: false,
- tags: ['docker', 'gitlab-org'],
+ tags,
runnersPath: '/root/project/runners#js-runners-settings',
});
});
it('renders information about the tags not being set', () => {
- expect(vm.$el.querySelector('.js-stuck-no-runners')).toBeNull();
- expect(vm.$el.querySelector('.js-stuck-with-tags')).not.toBeNull();
- expect(vm.$el.querySelector('.js-stuck-no-active-runner')).toBeNull();
+ expect(findStuckWithTags().exists()).toBe(true);
+ expect(findStuckNoActiveRunners().exists()).toBe(false);
+ expect(findStuckNoRunners().exists()).toBe(false);
});
it('renders tags', () => {
- expect(vm.$el.textContent).toContain('docker');
- expect(vm.$el.textContent).toContain('gitlab-org');
+ findAllBadges().wrappers.forEach((badgeElt, index) => {
+ expect(badgeElt.text()).toBe(tags[index]);
+ });
});
it('renders link to runners page', () => {
- expect(vm.$el.querySelector('.js-runners-path').getAttribute('href')).toEqual(
+ expect(findRunnerPathLink().attributes('href')).toBe(
'/root/project/runners#js-runners-settings',
);
});
@@ -60,20 +80,20 @@ describe('Stuck Block Job component', () => {
describe('without active runners', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createWrapper({
hasNoRunnersForProject: false,
runnersPath: '/root/project/runners#js-runners-settings',
});
});
it('renders information about project not having runners', () => {
- expect(vm.$el.querySelector('.js-stuck-no-runners')).toBeNull();
- expect(vm.$el.querySelector('.js-stuck-with-tags')).toBeNull();
- expect(vm.$el.querySelector('.js-stuck-no-active-runner')).not.toBeNull();
+ expect(findStuckNoActiveRunners().exists()).toBe(true);
+ expect(findStuckNoRunners().exists()).toBe(false);
+ expect(findStuckWithTags().exists()).toBe(false);
});
it('renders link to runners page', () => {
- expect(vm.$el.querySelector('.js-runners-path').getAttribute('href')).toEqual(
+ expect(findRunnerPathLink().attributes('href')).toBe(
'/root/project/runners#js-runners-settings',
);
});
diff --git a/spec/frontend/labels_select_spec.js b/spec/frontend/labels_select_spec.js
index 8b08eb9e124..cbc9a923f8b 100644
--- a/spec/frontend/labels_select_spec.js
+++ b/spec/frontend/labels_select_spec.js
@@ -29,7 +29,7 @@ const mockScopedLabels2 = [
title: 'Foo::Bar2',
description: 'Foobar2',
color: '#FFFFFF',
- text_color: '#000000',
+ text_color: '#333333',
},
];
@@ -61,10 +61,11 @@ describe('LabelsSelect', () => {
expect($labelEl.find('a').attr('title')).toBe(label.description);
});
- it('generated label item template has correct label styles', () => {
+ it('generated label item template has correct label styles and classes', () => {
expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
- `background-color: ${label.color}; color: ${label.text_color};`,
+ `background-color: ${label.color};`,
);
+ expect($labelEl.find('span.gl-label-text')).toHaveClass('gl-label-text-light');
});
it('generated label item has a gl-label-text class', () => {
@@ -100,16 +101,12 @@ describe('LabelsSelect', () => {
expect($labelEl.find('a').attr('data-html')).toBe('true');
});
- it('generated label item template has correct label styles', () => {
+ it('generated label item template has correct label styles and classes', () => {
expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
- `background-color: ${label.color}; color: ${label.text_color};`,
+ `background-color: ${label.color};`,
);
- expect(
- $labelEl
- .find('span.gl-label-text')
- .last()
- .attr('style'),
- ).toBe(`color: ${label.color};`);
+ expect($labelEl.find('span.gl-label-text')).toHaveClass('gl-label-text-light');
+ expect($labelEl.find('span.gl-label-text').last()).not.toHaveClass('gl-label-text-light');
});
it('generated label item has a badge class', () => {
@@ -131,16 +128,12 @@ describe('LabelsSelect', () => {
);
});
- it('generated label item template has correct label styles', () => {
+ it('generated label item template has correct label styles and classes', () => {
expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
- `background-color: ${label.color}; color: ${label.text_color};`,
+ `background-color: ${label.color};`,
);
- expect(
- $labelEl
- .find('span.gl-label-text')
- .last()
- .attr('style'),
- ).toBe(`color: ${label.text_color};`);
+ expect($labelEl.find('span.gl-label-text')).toHaveClass('gl-label-text-dark');
+ expect($labelEl.find('span.gl-label-text').last()).toHaveClass('gl-label-text-dark');
});
});
});
diff --git a/spec/frontend/lazy_loader_spec.js b/spec/frontend/lazy_loader_spec.js
index 79a49aedf37..5eb09bc2359 100644
--- a/spec/frontend/lazy_loader_spec.js
+++ b/spec/frontend/lazy_loader_spec.js
@@ -1,8 +1,8 @@
import { noop } from 'lodash';
-import LazyLoader from '~/lazy_loader';
import { TEST_HOST } from 'helpers/test_constants';
-import waitForPromises from './helpers/wait_for_promises';
import { useMockMutationObserver, useMockIntersectionObserver } from 'helpers/mock_dom_observer';
+import LazyLoader from '~/lazy_loader';
+import waitForPromises from './helpers/wait_for_promises';
const execImmediately = callback => {
callback();
@@ -45,10 +45,24 @@ describe('LazyLoader', () => {
return newImg;
};
+ const mockLoadEvent = () => {
+ const addEventListener = window.addEventListener.bind(window);
+
+ jest.spyOn(window, 'addEventListener').mockImplementation((event, callback) => {
+ if (event === 'load') {
+ callback();
+ } else {
+ addEventListener(event, callback);
+ }
+ });
+ };
+
beforeEach(() => {
jest.spyOn(window, 'requestAnimationFrame').mockImplementation(execImmediately);
jest.spyOn(window, 'requestIdleCallback').mockImplementation(execImmediately);
jest.spyOn(LazyLoader, 'loadImage');
+
+ mockLoadEvent();
});
afterEach(() => {
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 585f0de9cc3..effc446d846 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -1,5 +1,5 @@
-import * as commonUtils from '~/lib/utils/common_utils';
import $ from 'jquery';
+import * as commonUtils from '~/lib/utils/common_utils';
describe('common_utils', () => {
describe('parseUrl', () => {
diff --git a/spec/frontend/lib/utils/csrf_token_spec.js b/spec/frontend/lib/utils/csrf_token_spec.js
index 1b98ef126e9..55dd29571c0 100644
--- a/spec/frontend/lib/utils/csrf_token_spec.js
+++ b/spec/frontend/lib/utils/csrf_token_spec.js
@@ -1,5 +1,5 @@
-import csrf from '~/lib/utils/csrf';
import { setHTMLFixture } from 'helpers/fixtures';
+import csrf from '~/lib/utils/csrf';
describe('csrf', () => {
let testContext;
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index adf5c312149..9eb5587e83c 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -1,6 +1,6 @@
-import { __, s__ } from '~/locale';
import $ from 'jquery';
import timezoneMock from 'timezone-mock';
+import { __, s__ } from '~/locale';
import '~/commons/bootstrap';
import * as datetimeUtility from '~/lib/utils/datetime_utility';
@@ -628,3 +628,28 @@ describe('localTimeAgo', () => {
expect(element.getAttribute('title')).toBe(title);
});
});
+
+describe('dateFromParams', () => {
+ it('returns the expected date object', () => {
+ const expectedDate = new Date('2019-07-17T00:00:00.000Z');
+ const date = datetimeUtility.dateFromParams(2019, 6, 17);
+
+ expect(date.getYear()).toBe(expectedDate.getYear());
+ expect(date.getMonth()).toBe(expectedDate.getMonth());
+ expect(date.getDate()).toBe(expectedDate.getDate());
+ });
+});
+
+describe('differenceInSeconds', () => {
+ const startDateTime = new Date('2019-07-17T00:00:00.000Z');
+
+ it.each`
+ startDate | endDate | expected
+ ${startDateTime} | ${new Date('2019-07-17T00:00:00.000Z')} | ${0}
+ ${startDateTime} | ${new Date('2019-07-17T12:00:00.000Z')} | ${43200}
+ ${startDateTime} | ${new Date('2019-07-18T00:00:00.000Z')} | ${86400}
+ ${new Date('2019-07-18T00:00:00.000Z')} | ${startDateTime} | ${-86400}
+ `('returns $expected for $endDate - $startDate', ({ startDate, endDate, expected }) => {
+ expect(datetimeUtility.differenceInSeconds(startDate, endDate)).toBe(expected);
+ });
+});
diff --git a/spec/frontend/lib/utils/poll_spec.js b/spec/frontend/lib/utils/poll_spec.js
index 5ee9738ebf3..135c752b5cb 100644
--- a/spec/frontend/lib/utils/poll_spec.js
+++ b/spec/frontend/lib/utils/poll_spec.js
@@ -1,6 +1,6 @@
+import waitForPromises from 'helpers/wait_for_promises';
import Poll from '~/lib/utils/poll';
import { successCodes } from '~/lib/utils/http_status';
-import waitForPromises from 'helpers/wait_for_promises';
describe('Poll', () => {
let callbacks;
@@ -128,6 +128,35 @@ describe('Poll', () => {
});
});
+ describe('with delayed initial request', () => {
+ it('delays the first request', async done => {
+ mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
+
+ const Polling = new Poll({
+ resource: service,
+ method: 'fetch',
+ data: { page: 1 },
+ successCallback: callbacks.success,
+ errorCallback: callbacks.error,
+ });
+
+ Polling.makeDelayedRequest(1);
+
+ expect(Polling.timeoutID).toBeTruthy();
+
+ waitForAllCallsToFinish(2, () => {
+ Polling.stop();
+
+ expect(service.fetch.mock.calls).toHaveLength(2);
+ expect(service.fetch).toHaveBeenCalledWith({ page: 1 });
+ expect(callbacks.success).toHaveBeenCalled();
+ expect(callbacks.error).not.toHaveBeenCalled();
+
+ done();
+ });
+ });
+ });
+
describe('stop', () => {
it('stops polling when method is called', done => {
mockServiceCall({ status: 200, headers: { 'poll-interval': 1 } });
diff --git a/spec/frontend/lib/utils/poll_until_complete_spec.js b/spec/frontend/lib/utils/poll_until_complete_spec.js
index 15602b87b9c..c1df30756fd 100644
--- a/spec/frontend/lib/utils/poll_until_complete_spec.js
+++ b/spec/frontend/lib/utils/poll_until_complete_spec.js
@@ -1,8 +1,8 @@
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import pollUntilComplete from '~/lib/utils/poll_until_complete';
import httpStatusCodes from '~/lib/utils/http_status';
-import { TEST_HOST } from 'helpers/test_constants';
const endpoint = `${TEST_HOST}/foo`;
const mockData = 'mockData';
diff --git a/spec/frontend/lib/utils/sticky_spec.js b/spec/frontend/lib/utils/sticky_spec.js
index 4ad68cc9ff6..01e8fe777af 100644
--- a/spec/frontend/lib/utils/sticky_spec.js
+++ b/spec/frontend/lib/utils/sticky_spec.js
@@ -1,5 +1,5 @@
-import { isSticky } from '~/lib/utils/sticky';
import { setHTMLFixture } from 'helpers/fixtures';
+import { isSticky } from '~/lib/utils/sticky';
const TEST_OFFSET_TOP = 500;
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index e769580b587..a13ac3778cf 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -160,6 +160,118 @@ describe('URL utility', () => {
'https://host/path?op=%2B&foo=bar',
);
});
+
+ describe('with spread array option', () => {
+ const spreadArrayOptions = { spreadArrays: true };
+
+ it('maintains multiple values', () => {
+ expect(mergeUrlParams({}, '?array[]=foo&array[]=bar', spreadArrayOptions)).toBe(
+ '?array[]=foo&array[]=bar',
+ );
+ });
+
+ it('overrides multiple values with one', () => {
+ expect(
+ mergeUrlParams({ array: ['baz'] }, '?array[]=foo&array[]=bar', spreadArrayOptions),
+ ).toBe('?array[]=baz');
+ });
+ it('removes existing params', () => {
+ expect(
+ mergeUrlParams({ array: null }, '?array[]=foo&array[]=bar', spreadArrayOptions),
+ ).toBe('');
+ });
+ it('removes existing params and keeps others', () => {
+ expect(
+ mergeUrlParams(
+ { array: null },
+ '?array[]=foo&array[]=bar&other=quis',
+ spreadArrayOptions,
+ ),
+ ).toBe('?other=quis');
+ });
+      it('removes existing params along with others', () => {
+ expect(
+ mergeUrlParams(
+ { array: null, other: 'quis' },
+ '?array[]=foo&array[]=bar',
+ spreadArrayOptions,
+ ),
+ ).toBe('?other=quis');
+ });
+ it('handles empty arrays along other parameters', () => {
+ expect(mergeUrlParams({ array: [], other: 'quis' }, '?array=baz', spreadArrayOptions)).toBe(
+ '?array[]=&other=quis',
+ );
+ });
+ it('handles multiple values along other parameters', () => {
+ expect(
+ mergeUrlParams(
+ { array: ['foo', 'bar'], other: 'quis' },
+ '?array=baz',
+ spreadArrayOptions,
+ ),
+ ).toBe('?array[]=foo&array[]=bar&other=quis');
+ });
+ it('handles array values with encoding', () => {
+ expect(
+ mergeUrlParams({ array: ['foo+', 'bar,baz'] }, '?array[]=%2Fbaz', spreadArrayOptions),
+ ).toBe('?array[]=foo%2B&array[]=bar%2Cbaz');
+ });
+ it('handles multiple arrays', () => {
+ expect(
+ mergeUrlParams(
+ { array1: ['foo+', 'bar,baz'], array2: ['quis', 'quux'] },
+ '?array1[]=%2Fbaz',
+ spreadArrayOptions,
+ ),
+ ).toBe('?array1[]=foo%2B&array1[]=bar%2Cbaz&array2[]=quis&array2[]=quux');
+ });
+ });
+
+ describe('without spread array option', () => {
+ it('maintains multiple values', () => {
+ expect(mergeUrlParams({}, '?array=foo%2Cbar')).toBe('?array=foo%2Cbar');
+ });
+ it('overrides multiple values with one', () => {
+ expect(mergeUrlParams({ array: ['baz'] }, '?array=foo%2Cbar')).toBe('?array=baz');
+ });
+ it('removes existing params', () => {
+ expect(mergeUrlParams({ array: null }, '?array=foo%2Cbar')).toBe('');
+ });
+ it('removes existing params and keeps others', () => {
+ expect(mergeUrlParams({ array: null }, '?array=foo&array=bar&other=quis')).toBe(
+ '?other=quis',
+ );
+ });
+    it('removes existing params along with others', () => {
+ expect(mergeUrlParams({ array: null, other: 'quis' }, '?array=foo&array=bar')).toBe(
+ '?other=quis',
+ );
+ });
+ it('handles empty arrays along other parameters', () => {
+ expect(mergeUrlParams({ array: [], other: 'quis' }, '?array=baz')).toBe(
+ '?array=&other=quis',
+ );
+ });
+ it('handles multiple values along other parameters', () => {
+ expect(mergeUrlParams({ array: ['foo', 'bar'], other: 'quis' }, '?array=baz')).toBe(
+ '?array=foo%2Cbar&other=quis',
+ );
+ });
+ it('handles array values with encoding', () => {
+ expect(mergeUrlParams({ array: ['foo+', 'bar,baz'] }, '?array=%2Fbaz')).toBe(
+ '?array=foo%2B%2Cbar%2Cbaz',
+ );
+ });
+ it('handles multiple arrays', () => {
+ expect(
+ mergeUrlParams(
+ { array1: ['foo+', 'bar,baz'], array2: ['quis', 'quux'] },
+ '?array1=%2Fbaz',
+ ),
+ ).toBe('?array1=foo%2B%2Cbar%2Cbaz&array2=quis%2Cquux');
+ });
+ });
});
describe('removeParams', () => {
diff --git a/spec/frontend/locale/index_spec.js b/spec/frontend/locale/index_spec.js
index 346ed5182f4..d65d7c195b2 100644
--- a/spec/frontend/locale/index_spec.js
+++ b/spec/frontend/locale/index_spec.js
@@ -1,6 +1,5 @@
-import { createDateTimeFormat, languageCode } from '~/locale';
-
import { setLanguage } from 'helpers/locale_helper';
+import { createDateTimeFormat, languageCode } from '~/locale';
describe('locale', () => {
afterEach(() => setLanguage(null));
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
index dee62709d81..6421aca684f 100644
--- a/spec/frontend/logs/components/environment_logs_spec.js
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -1,4 +1,4 @@
-import { GlSprintf, GlIcon, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlSprintf, GlIcon, GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import EnvironmentLogs from '~/logs/components/environment_logs.vue';
@@ -124,7 +124,7 @@ describe('EnvironmentLogs', () => {
expect(wrapper.isVueInstance()).toBe(true);
expect(wrapper.isEmpty()).toBe(false);
- expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
+ expect(findEnvironmentsDropdown().is(GlDeprecatedDropdown)).toBe(true);
expect(findSimpleFilters().exists()).toBe(true);
expect(findLogControlButtons().exists()).toBe(true);
@@ -167,7 +167,7 @@ describe('EnvironmentLogs', () => {
it('displays a disabled environments dropdown', () => {
expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
- expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ expect(findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem).length).toBe(0);
});
it('does not update buttons state', () => {
@@ -244,7 +244,7 @@ describe('EnvironmentLogs', () => {
});
it('populates environments dropdown', () => {
- const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const items = findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem);
expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
expect(items.length).toBe(mockEnvironments.length);
mockEnvironments.forEach((env, i) => {
@@ -254,7 +254,7 @@ describe('EnvironmentLogs', () => {
});
it('dropdown has one environment selected', () => {
- const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const items = findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem);
mockEnvironments.forEach((env, i) => {
const item = items.at(i);
@@ -289,7 +289,7 @@ describe('EnvironmentLogs', () => {
describe('when user clicks', () => {
it('environment name, trace is refreshed', () => {
- const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const items = findEnvironmentsDropdown().findAll(GlDeprecatedDropdownItem);
const index = 1; // any env
expect(dispatch).not.toHaveBeenCalledWith(`${module}/showEnvironment`, expect.anything());
diff --git a/spec/frontend/logs/components/log_advanced_filters_spec.js b/spec/frontend/logs/components/log_advanced_filters_spec.js
index adcd6b4fb07..007c5000e16 100644
--- a/spec/frontend/logs/components/log_advanced_filters_spec.js
+++ b/spec/frontend/logs/components/log_advanced_filters_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import { defaultTimeRange } from '~/vue_shared/constants';
import { GlFilteredSearch } from '@gitlab/ui';
+import { defaultTimeRange } from '~/vue_shared/constants';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { createStore } from '~/logs/stores';
import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
diff --git a/spec/frontend/logs/components/log_simple_filters_spec.js b/spec/frontend/logs/components/log_simple_filters_spec.js
index 13504a2b1fc..e739621431e 100644
--- a/spec/frontend/logs/components/log_simple_filters_spec.js
+++ b/spec/frontend/logs/components/log_simple_filters_spec.js
@@ -1,4 +1,4 @@
-import { GlIcon, GlDropdownItem } from '@gitlab/ui';
+import { GlIcon, GlDeprecatedDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { createStore } from '~/logs/stores';
import { mockPods, mockPodName } from '../mock_data';
@@ -17,7 +17,7 @@ describe('LogSimpleFilters', () => {
const findPodsNoPodsText = () => wrapper.find({ ref: 'noPodsMsg' });
const findPodsDropdownItems = () =>
findPodsDropdown()
- .findAll(GlDropdownItem)
+ .findAll(GlDeprecatedDropdownItem)
.filter(item => !item.is('[disabled]'));
const mockPodsLoading = () => {
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
index f9b3508e01c..f4c567a2ea3 100644
--- a/spec/frontend/logs/mock_data.js
+++ b/spec/frontend/logs/mock_data.js
@@ -36,6 +36,16 @@ export const mockManagedApps = [
path: '/root/autodevops-deploy/-/clusters/15',
gitlab_managed_apps_logs_path: '/root/autodevops-deploy/-/logs?cluster_id=15',
},
+ {
+ cluster_type: 'project_type',
+ enabled: true,
+ environment_scope: '*',
+ name: 'kubernetes-cluster-2',
+ provider_type: 'user',
+ status: 'connected',
+ path: '/root/autodevops-deploy/-/clusters/16',
+ gitlab_managed_apps_logs_path: null,
+ },
];
export const mockPodName = 'production-764c58d697-aaaaa';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index acd9536a682..e4501abdc76 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -17,7 +17,7 @@ import {
import { defaultTimeRange } from '~/vue_shared/constants';
import axios from '~/lib/utils/axios_utils';
-import flash from '~/flash';
+import { deprecatedCreateFlash as flash } from '~/flash';
import {
mockPodName,
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
index 137533f02d7..4a095e0f26e 100644
--- a/spec/frontend/logs/stores/mutations_spec.js
+++ b/spec/frontend/logs/stores/mutations_spec.js
@@ -272,7 +272,8 @@ describe('Logs Store Mutations', () => {
mutations[types.RECEIVE_MANAGED_APPS_DATA_SUCCESS](state, mockManagedApps);
- expect(state.managedApps.options).toEqual(mockManagedApps);
+ expect(state.managedApps.options.length).toEqual(1);
+ expect(state.managedApps.options).toEqual([mockManagedApps[0]]);
expect(state.managedApps.isLoading).toBe(false);
});
});
diff --git a/spec/frontend/maintenance_mode_settings/components/app_spec.js b/spec/frontend/maintenance_mode_settings/components/app_spec.js
index 0453354b008..ad753642e85 100644
--- a/spec/frontend/maintenance_mode_settings/components/app_spec.js
+++ b/spec/frontend/maintenance_mode_settings/components/app_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
+import { GlToggle, GlFormTextarea, GlButton } from '@gitlab/ui';
import MaintenanceModeSettingsApp from '~/maintenance_mode_settings/components/app.vue';
-import { GlToggle, GlFormTextarea, GlDeprecatedButton } from '@gitlab/ui';
describe('MaintenanceModeSettingsApp', () => {
let wrapper;
@@ -16,7 +16,7 @@ describe('MaintenanceModeSettingsApp', () => {
const findMaintenanceModeSettingsContainer = () => wrapper.find('article');
const findGlToggle = () => wrapper.find(GlToggle);
const findGlFormTextarea = () => wrapper.find(GlFormTextarea);
- const findGlButton = () => wrapper.find(GlDeprecatedButton);
+ const findGlButton = () => wrapper.find(GlButton);
describe('template', () => {
beforeEach(() => {
@@ -35,7 +35,7 @@ describe('MaintenanceModeSettingsApp', () => {
expect(findGlFormTextarea().exists()).toBe(true);
});
- it('renders the GlDeprecatedButton', () => {
+ it('renders the GlButton', () => {
expect(findGlButton().exists()).toBe(true);
});
});
diff --git a/spec/frontend/merge_request_spec.js b/spec/frontend/merge_request_spec.js
index f4f2a78f5f7..16f04d032fd 100644
--- a/spec/frontend/merge_request_spec.js
+++ b/spec/frontend/merge_request_spec.js
@@ -1,10 +1,10 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
import axios from '~/lib/utils/axios_utils';
import MergeRequest from '~/merge_request';
import CloseReopenReportToggle from '~/close_reopen_report_toggle';
import IssuablesHelper from '~/helpers/issuables_helper';
-import { TEST_HOST } from 'spec/test_constants';
describe('MergeRequest', () => {
const test = {};
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index ad373d04ec0..85a4ee8974e 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -1,11 +1,11 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
+import initMrPage from 'helpers/init_vue_mr_page_helper';
import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
import '~/commit/pipelines/pipelines_bundle';
import '~/lib/utils/common_utils';
import 'vendor/jquery.scrollTo';
-import initMrPage from 'helpers/init_vue_mr_page_helper';
jest.mock('~/lib/utils/webpack', () => ({
resetServiceWorkersPublicPath: jest.fn(),
diff --git a/spec/frontend/milestones/project_milestone_combobox_spec.js b/spec/frontend/milestones/project_milestone_combobox_spec.js
index a7321d21559..2265c9bdc2e 100644
--- a/spec/frontend/milestones/project_milestone_combobox_spec.js
+++ b/spec/frontend/milestones/project_milestone_combobox_spec.js
@@ -1,9 +1,9 @@
-import { milestones as projectMilestones } from './mock_data';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
-import MilestoneCombobox from '~/milestones/project_milestone_combobox.vue';
import { GlNewDropdown, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
+import MilestoneCombobox from '~/milestones/project_milestone_combobox.vue';
+import { milestones as projectMilestones } from './mock_data';
const TEST_SEARCH_ENDPOINT = '/api/v4/projects/8/search';
diff --git a/spec/frontend/monitoring/alert_widget_spec.js b/spec/frontend/monitoring/alert_widget_spec.js
index f0355dfa01b..193dbb3e63f 100644
--- a/spec/frontend/monitoring/alert_widget_spec.js
+++ b/spec/frontend/monitoring/alert_widget_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlTooltip, GlSprintf, GlBadge } from '@gitlab/ui';
-import AlertWidget from '~/monitoring/components/alert_widget.vue';
import waitForPromises from 'helpers/wait_for_promises';
-import createFlash from '~/flash';
+import AlertWidget from '~/monitoring/components/alert_widget.vue';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
const mockReadAlert = jest.fn();
const mockCreateAlert = jest.fn();
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index e7c51d82cd2..7ef956f8e05 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -20,7 +20,6 @@ exports[`Dashboard template matches the default snapshot 1`] = `
data-qa-selector="dashboards_filter_dropdown"
defaultbranch="master"
id="monitor-dashboards-dropdown"
- modalid="duplicateDashboard"
toggle-class="dropdown-menu-toggle"
/>
</div>
@@ -33,26 +32,24 @@ exports[`Dashboard template matches the default snapshot 1`] = `
<div
class="mb-2 pr-2 d-flex d-sm-block"
>
- <gl-dropdown-stub
+ <gl-new-dropdown-stub
+ category="tertiary"
class="flex-grow-1"
data-qa-selector="environments_dropdown"
+ headertext=""
id="monitor-environments-dropdown"
menu-class="monitor-environment-dropdown-menu"
+ size="medium"
text="production"
- toggle-class="dropdown-menu-toggle"
+ toggleclass="dropdown-menu-toggle"
+ variant="default"
>
<div
class="d-flex flex-column overflow-hidden"
>
- <gl-dropdown-header-stub
- class="monitor-environment-dropdown-header text-center"
- >
-
- Environment
-
- </gl-dropdown-header-stub>
-
- <gl-dropdown-divider-stub />
+ <gl-new-dropdown-header-stub>
+ Environment
+ </gl-new-dropdown-header-stub>
<gl-search-box-by-type-stub
class="m-2"
@@ -72,7 +69,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
</div>
</div>
- </gl-dropdown-stub>
+ </gl-new-dropdown-stub>
</div>
<div
@@ -100,45 +97,23 @@ exports[`Dashboard template matches the default snapshot 1`] = `
<div
class="d-sm-flex"
>
- <div
- class="mb-2 mr-2 d-flex"
- >
- <div
- class="flex-grow-1"
- title="Star dashboard"
- >
- <gl-deprecated-button-stub
- class="w-100"
- size="md"
- variant="default"
- >
- <gl-icon-stub
- name="star-o"
- size="16"
- />
- </gl-deprecated-button-stub>
- </div>
- </div>
-
<!---->
<!---->
- <!---->
-
- <!---->
-
- <!---->
-
- <!---->
+ <div
+ class="gl-mb-3 gl-mr-3 d-flex d-sm-block"
+ >
+ <actions-menu-stub
+ custommetricspath="/monitoring/monitor-project/prometheus/metrics"
+ defaultbranch="master"
+ isootbdashboard="true"
+ validatequerypath="/monitoring/monitor-project/prometheus/metrics/validate_query"
+ />
+ </div>
<!---->
</div>
-
- <duplicate-dashboard-modal-stub
- defaultbranch="master"
- modalid="duplicateDashboard"
- />
</div>
<empty-state-stub
diff --git a/spec/frontend/monitoring/components/alert_widget_form_spec.js b/spec/frontend/monitoring/components/alert_widget_form_spec.js
index a8416216a94..6d71a9b09e5 100644
--- a/spec/frontend/monitoring/components/alert_widget_form_spec.js
+++ b/spec/frontend/monitoring/components/alert_widget_form_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
+import INVALID_URL from '~/lib/utils/invalid_url';
import AlertWidgetForm from '~/monitoring/components/alert_widget_form.vue';
import ModalStub from '../stubs/modal_stub';
@@ -24,7 +25,13 @@ describe('AlertWidgetForm', () => {
const propsWithAlertData = {
...defaultProps,
alertsToManage: {
- alert: { alert_path: alertPath, operator: '<', threshold: 5, metricId },
+ alert: {
+ alert_path: alertPath,
+ operator: '<',
+ threshold: 5,
+ metricId,
+ runbookUrl: INVALID_URL,
+ },
},
configuredAlert: metricId,
};
@@ -46,15 +53,11 @@ describe('AlertWidgetForm', () => {
const modal = () => wrapper.find(ModalStub);
const modalTitle = () => modal().attributes('title');
const submitButton = () => modal().find(GlLink);
+ const findRunbookField = () => modal().find('[data-testid="alertRunbookField"]');
+ const findThresholdField = () => modal().find('[data-qa-selector="alert_threshold_field"]');
const submitButtonTrackingOpts = () =>
JSON.parse(submitButton().attributes('data-tracking-options'));
- const e = {
- preventDefault: jest.fn(),
- };
-
- beforeEach(() => {
- e.preventDefault.mockReset();
- });
+ const stubEvent = { preventDefault: jest.fn() };
afterEach(() => {
if (wrapper) wrapper.destroy();
@@ -81,35 +84,34 @@ describe('AlertWidgetForm', () => {
expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.create);
});
- it('emits a "create" event when form submitted without existing alert', () => {
- createComponent();
+ it('emits a "create" event when form submitted without existing alert', async () => {
+ createComponent(defaultProps);
- wrapper.vm.selectQuery('9');
- wrapper.setData({
- threshold: 900,
- });
+ modal().vm.$emit('shown');
+
+ findThresholdField().vm.$emit('input', 900);
+ findRunbookField().vm.$emit('input', INVALID_URL);
- wrapper.vm.handleSubmit(e);
+ modal().vm.$emit('ok', stubEvent);
expect(wrapper.emitted().create[0]).toEqual([
{
alert: undefined,
operator: '>',
threshold: 900,
- prometheus_metric_id: '9',
+ prometheus_metric_id: '8',
+ runbookUrl: INVALID_URL,
},
]);
- expect(e.preventDefault).toHaveBeenCalledTimes(1);
});
it('resets form when modal is dismissed (hidden)', () => {
- createComponent();
+ createComponent(defaultProps);
- wrapper.vm.selectQuery('9');
- wrapper.vm.selectQuery('>');
- wrapper.setData({
- threshold: 800,
- });
+ modal().vm.$emit('shown');
+
+ findThresholdField().vm.$emit('input', 800);
+ findRunbookField().vm.$emit('input', INVALID_URL);
modal().vm.$emit('hidden');
@@ -117,6 +119,7 @@ describe('AlertWidgetForm', () => {
expect(wrapper.vm.operator).toBe(null);
expect(wrapper.vm.threshold).toBe(null);
expect(wrapper.vm.prometheusMetricId).toBe(null);
+ expect(wrapper.vm.runbookUrl).toBe(null);
});
it('sets selectedAlert to the provided configuredAlert on modal show', () => {
@@ -163,7 +166,7 @@ describe('AlertWidgetForm', () => {
beforeEach(() => {
createComponent(propsWithAlertData);
- wrapper.vm.selectQuery(metricId);
+ modal().vm.$emit('shown');
});
it('sets tracking options for delete alert', () => {
@@ -176,7 +179,7 @@ describe('AlertWidgetForm', () => {
});
it('emits "delete" event when form values unchanged', () => {
- wrapper.vm.handleSubmit(e);
+ modal().vm.$emit('ok', stubEvent);
expect(wrapper.emitted().delete[0]).toEqual([
{
@@ -184,37 +187,52 @@ describe('AlertWidgetForm', () => {
operator: '<',
threshold: 5,
prometheus_metric_id: '8',
+ runbookUrl: INVALID_URL,
},
]);
- expect(e.preventDefault).toHaveBeenCalledTimes(1);
});
+ });
- it('emits "update" event when form changed', () => {
- wrapper.setData({
- threshold: 11,
- });
+ it('emits "update" event when form changed', () => {
+ const updatedRunbookUrl = `${INVALID_URL}/test`;
- wrapper.vm.handleSubmit(e);
+ createComponent(propsWithAlertData);
- expect(wrapper.emitted().update[0]).toEqual([
- {
- alert: 'alert',
- operator: '<',
- threshold: 11,
- prometheus_metric_id: '8',
- },
- ]);
- expect(e.preventDefault).toHaveBeenCalledTimes(1);
- });
+ modal().vm.$emit('shown');
+
+ findRunbookField().vm.$emit('input', updatedRunbookUrl);
+ findThresholdField().vm.$emit('input', 11);
- it('sets tracking options for update alert', () => {
- wrapper.setData({
+ modal().vm.$emit('ok', stubEvent);
+
+ expect(wrapper.emitted().update[0]).toEqual([
+ {
+ alert: 'alert',
+ operator: '<',
threshold: 11,
- });
+ prometheus_metric_id: '8',
+ runbookUrl: updatedRunbookUrl,
+ },
+ ]);
+ });
+
+ it('sets tracking options for update alert', async () => {
+ createComponent(propsWithAlertData);
+
+ modal().vm.$emit('shown');
+
+ findThresholdField().vm.$emit('input', 11);
+
+ await wrapper.vm.$nextTick();
+
+ expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.update);
+ });
+
+ describe('alert runbooks', () => {
+ it('shows the runbook field', () => {
+ createComponent();
- return wrapper.vm.$nextTick(() => {
- expect(submitButtonTrackingOpts()).toEqual(dataTrackingOptions.update);
- });
+ expect(findRunbookField().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/gauge_spec.js b/spec/frontend/monitoring/components/charts/gauge_spec.js
new file mode 100644
index 00000000000..850e2ca87db
--- /dev/null
+++ b/spec/frontend/monitoring/components/charts/gauge_spec.js
@@ -0,0 +1,215 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlGaugeChart } from '@gitlab/ui/dist/charts';
+import GaugeChart from '~/monitoring/components/charts/gauge.vue';
+import { gaugeChartGraphData } from '../../graph_data';
+
+describe('Gauge Chart component', () => {
+ const defaultGraphData = gaugeChartGraphData();
+
+ let wrapper;
+
+ const findGaugeChart = () => wrapper.find(GlGaugeChart);
+
+ const createWrapper = ({ ...graphProps } = {}) => {
+ wrapper = shallowMount(GaugeChart, {
+ propsData: {
+ graphData: {
+ ...defaultGraphData,
+ ...graphProps,
+ },
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('chart component', () => {
+ it('is rendered when props are passed', () => {
+ createWrapper();
+
+ expect(findGaugeChart().exists()).toBe(true);
+ });
+ });
+
+ describe('min and max', () => {
+ const MIN_DEFAULT = 0;
+ const MAX_DEFAULT = 100;
+
+ it('are passed to chart component', () => {
+ createWrapper();
+
+ expect(findGaugeChart().props('min')).toBe(100);
+ expect(findGaugeChart().props('max')).toBe(1000);
+ });
+
+ const invalidCases = [undefined, NaN, 'a string'];
+
+ it.each(invalidCases)(
+ 'if min has invalid value, defaults are used for both min and max',
+ invalidValue => {
+ createWrapper({ minValue: invalidValue });
+
+ expect(findGaugeChart().props('min')).toBe(MIN_DEFAULT);
+ expect(findGaugeChart().props('max')).toBe(MAX_DEFAULT);
+ },
+ );
+
+ it.each(invalidCases)(
+ 'if max has invalid value, defaults are used for both min and max',
+ invalidValue => {
+ createWrapper({ minValue: invalidValue });
+
+ expect(findGaugeChart().props('min')).toBe(MIN_DEFAULT);
+ expect(findGaugeChart().props('max')).toBe(MAX_DEFAULT);
+ },
+ );
+
+ it('if min is bigger than max, defaults are used for both min and max', () => {
+ createWrapper({ minValue: 100, maxValue: 0 });
+
+ expect(findGaugeChart().props('min')).toBe(MIN_DEFAULT);
+ expect(findGaugeChart().props('max')).toBe(MAX_DEFAULT);
+ });
+ });
+
+ describe('thresholds', () => {
+ it('thresholds are set on chart', () => {
+ createWrapper();
+
+ expect(findGaugeChart().props('thresholds')).toEqual([500, 800]);
+ });
+
+ it('when no thresholds are defined, a default threshold is defined at 95% of max_value', () => {
+ createWrapper({
+ minValue: 0,
+ maxValue: 100,
+ thresholds: {},
+ });
+
+ expect(findGaugeChart().props('thresholds')).toEqual([95]);
+ });
+
+ it('when out of min-max bounds thresholds are defined, a default threshold is defined at 95% of the range between min_value and max_value', () => {
+ createWrapper({
+ thresholds: {
+ values: [-10, 1500],
+ },
+ });
+
+ expect(findGaugeChart().props('thresholds')).toEqual([855]);
+ });
+
+ describe('when mode is absolute', () => {
+ it('only valid threshold values are used', () => {
+ createWrapper({
+ thresholds: {
+ mode: 'absolute',
+ values: [undefined, 10, 110, NaN, 'a string', 400],
+ },
+ });
+
+ expect(findGaugeChart().props('thresholds')).toEqual([110, 400]);
+ });
+
+ it('if all threshold values are invalid, a default threshold is defined at 95% of the range between min_value and max_value', () => {
+ createWrapper({
+ thresholds: {
+ mode: 'absolute',
+ values: [NaN, undefined, 'a string', 1500],
+ },
+ });
+
+ expect(findGaugeChart().props('thresholds')).toEqual([855]);
+ });
+ });
+
+ describe('when mode is percentage', () => {
+      it('when values outside of 0-100 bounds are used, a default threshold is defined at 95% of the range between min_value and max_value', () => {
+ createWrapper({
+ thresholds: {
+ mode: 'percentage',
+ values: [110],
+ },
+ });
+
+ expect(findGaugeChart().props('thresholds')).toEqual([855]);
+ });
+
+      it('if all threshold values are invalid, a default threshold is defined at 95% of the range between min_value and max_value', () => {
+ createWrapper({
+ thresholds: {
+ mode: 'percentage',
+ values: [NaN, undefined, 'a string', 1500],
+ },
+ });
+
+ expect(findGaugeChart().props('thresholds')).toEqual([855]);
+ });
+ });
+ });
+
+ describe('split (the number of ticks on the chart arc)', () => {
+ const SPLIT_DEFAULT = 10;
+
+ it('is passed to chart as prop', () => {
+ createWrapper();
+
+ expect(findGaugeChart().props('splitNumber')).toBe(20);
+ });
+
+ it('if not explicitly set, passes a default value to chart', () => {
+ createWrapper({ split: '' });
+
+ expect(findGaugeChart().props('splitNumber')).toBe(SPLIT_DEFAULT);
+ });
+
+ it('if set as a number that is not an integer, passes the default value to chart', () => {
+ createWrapper({ split: 10.5 });
+
+ expect(findGaugeChart().props('splitNumber')).toBe(SPLIT_DEFAULT);
+ });
+
+ it('if set as a negative number, passes the default value to chart', () => {
+ createWrapper({ split: -10 });
+
+ expect(findGaugeChart().props('splitNumber')).toBe(SPLIT_DEFAULT);
+ });
+ });
+
+ describe('text (the text displayed on the gauge for the current value)', () => {
+ it('displays the query result value when format is not set', () => {
+ createWrapper({ format: '' });
+
+ expect(findGaugeChart().props('text')).toBe('3');
+ });
+
+ it('displays the query result value when format is set to invalid value', () => {
+ createWrapper({ format: 'invalid' });
+
+ expect(findGaugeChart().props('text')).toBe('3');
+ });
+
+ it('displays a formatted query result value when format is set', () => {
+ createWrapper();
+
+ expect(findGaugeChart().props('text')).toBe('3kB');
+ });
+
+ it('displays a placeholder value when metric is empty', () => {
+ createWrapper({ metrics: [] });
+
+ expect(findGaugeChart().props('text')).toBe('--');
+ });
+ });
+
+ describe('value', () => {
+ it('correct value is passed', () => {
+ createWrapper();
+
+ expect(findGaugeChart().props('value')).toBe(3);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/charts/heatmap_spec.js b/spec/frontend/monitoring/components/charts/heatmap_spec.js
index 2a1c78025ae..27a2021e9be 100644
--- a/spec/frontend/monitoring/components/charts/heatmap_spec.js
+++ b/spec/frontend/monitoring/components/charts/heatmap_spec.js
@@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { GlHeatmap } from '@gitlab/ui/dist/charts';
import timezoneMock from 'timezone-mock';
import Heatmap from '~/monitoring/components/charts/heatmap.vue';
-import { graphDataPrometheusQueryRangeMultiTrack } from '../../mock_data';
+import { heatmapGraphData } from '../../graph_data';
describe('Heatmap component', () => {
let wrapper;
@@ -10,10 +10,12 @@ describe('Heatmap component', () => {
const findChart = () => wrapper.find(GlHeatmap);
+ const graphData = heatmapGraphData();
+
const createWrapper = (props = {}) => {
wrapper = shallowMount(Heatmap, {
propsData: {
- graphData: graphDataPrometheusQueryRangeMultiTrack,
+ graphData: heatmapGraphData(),
containerWidth: 100,
...props,
},
@@ -38,11 +40,11 @@ describe('Heatmap component', () => {
});
it('should display a label on the x axis', () => {
- expect(wrapper.vm.xAxisName).toBe(graphDataPrometheusQueryRangeMultiTrack.x_label);
+ expect(wrapper.vm.xAxisName).toBe(graphData.xLabel);
});
it('should display a label on the y axis', () => {
- expect(wrapper.vm.yAxisName).toBe(graphDataPrometheusQueryRangeMultiTrack.y_label);
+ expect(wrapper.vm.yAxisName).toBe(graphData.y_label);
});
// According to the echarts docs https://echarts.apache.org/en/option.html#series-heatmap.data
@@ -54,24 +56,24 @@ describe('Heatmap component', () => {
const row = wrapper.vm.chartData[0];
expect(row.length).toBe(3);
- expect(wrapper.vm.chartData.length).toBe(30);
+ expect(wrapper.vm.chartData.length).toBe(6);
});
it('returns a series of labels for the x axis', () => {
const { xAxisLabels } = wrapper.vm;
- expect(xAxisLabels.length).toBe(5);
+ expect(xAxisLabels.length).toBe(2);
});
describe('y axis labels', () => {
- const gmtLabels = ['3:00 PM', '4:00 PM', '5:00 PM', '6:00 PM', '7:00 PM', '8:00 PM'];
+ const gmtLabels = ['8:10 PM', '8:12 PM', '8:14 PM'];
it('y-axis labels are formatted in AM/PM format', () => {
expect(findChart().props('yAxisLabels')).toEqual(gmtLabels);
});
describe('when in PT timezone', () => {
- const ptLabels = ['8:00 AM', '9:00 AM', '10:00 AM', '11:00 AM', '12:00 PM', '1:00 PM'];
+ const ptLabels = ['1:10 PM', '1:12 PM', '1:14 PM'];
const utcLabels = gmtLabels; // Identical in this case
beforeAll(() => {
diff --git a/spec/frontend/monitoring/components/charts/options_spec.js b/spec/frontend/monitoring/components/charts/options_spec.js
index 1c8fdc01e3e..3372d27e4f9 100644
--- a/spec/frontend/monitoring/components/charts/options_spec.js
+++ b/spec/frontend/monitoring/components/charts/options_spec.js
@@ -1,5 +1,9 @@
import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
-import { getYAxisOptions, getTooltipFormatter } from '~/monitoring/components/charts/options';
+import {
+ getYAxisOptions,
+ getTooltipFormatter,
+ getValidThresholds,
+} from '~/monitoring/components/charts/options';
describe('options spec', () => {
describe('getYAxisOptions', () => {
@@ -82,4 +86,242 @@ describe('options spec', () => {
expect(formatter(1)).toBe('1.000B');
});
});
+
+ describe('getValidThresholds', () => {
+ const invalidCases = [null, undefined, NaN, 'a string', true, false];
+
+ let thresholds;
+
+ afterEach(() => {
+ thresholds = null;
+ });
+
+ it('returns same thresholds when passed values within range', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [10, 50],
+ });
+
+ expect(thresholds).toEqual([10, 50]);
+ });
+
+ it('filters out thresholds that are out of range', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [-5, 10, 110],
+ });
+
+ expect(thresholds).toEqual([10]);
+ });
+ it('filters out duplicate thresholds', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [5, 5, 10, 10],
+ });
+
+ expect(thresholds).toEqual([5, 10]);
+ });
+
+ it('sorts passed thresholds and applies only the first two in ascending order', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [10, 1, 35, 20, 5],
+ });
+
+ expect(thresholds).toEqual([1, 5]);
+ });
+
+ it('thresholds equal to min or max are filtered out', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [0, 100],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it.each(invalidCases)('invalid values for thresholds are filtered out', invalidValue => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [10, invalidValue],
+ });
+
+ expect(thresholds).toEqual([10]);
+ });
+
+ describe('range', () => {
+ it('when range is not defined, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ values: [10, 20],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it('when min is not defined, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { max: 100 },
+ values: [10, 20],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it('when max is not defined, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0 },
+ values: [10, 20],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it('when min is larger than max, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 100, max: 0 },
+ values: [10, 20],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it.each(invalidCases)(
+ 'when min has invalid value, empty result is returned',
+ invalidValue => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: invalidValue, max: 100 },
+ values: [10, 20],
+ });
+
+ expect(thresholds).toEqual([]);
+ },
+ );
+
+ it.each(invalidCases)(
+ 'when max has invalid value, empty result is returned',
+ invalidValue => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: invalidValue },
+ values: [10, 20],
+ });
+
+ expect(thresholds).toEqual([]);
+ },
+ );
+ });
+
+ describe('values', () => {
+ it('if values parameter is omitted, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it('if there are no values passed, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ it.each(invalidCases)(
+ 'if invalid values are passed, empty result is returned',
+ invalidValue => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [invalidValue],
+ });
+
+ expect(thresholds).toEqual([]);
+ },
+ );
+ });
+
+ describe('mode', () => {
+ it.each(invalidCases)(
+ 'if invalid values are passed, empty result is returned',
+ invalidValue => {
+ thresholds = getValidThresholds({
+ mode: invalidValue,
+ range: { min: 0, max: 100 },
+ values: [10, 50],
+ });
+
+ expect(thresholds).toEqual([]);
+ },
+ );
+
+ it('if mode is not passed, empty result is returned', () => {
+ thresholds = getValidThresholds({
+ range: { min: 0, max: 100 },
+ values: [10, 50],
+ });
+
+ expect(thresholds).toEqual([]);
+ });
+
+ describe('absolute mode', () => {
+ it('absolute mode behaves correctly', () => {
+ thresholds = getValidThresholds({
+ mode: 'absolute',
+ range: { min: 0, max: 100 },
+ values: [10, 50],
+ });
+
+ expect(thresholds).toEqual([10, 50]);
+ });
+ });
+
+ describe('percentage mode', () => {
+ it('percentage mode behaves correctly', () => {
+ thresholds = getValidThresholds({
+ mode: 'percentage',
+ range: { min: 0, max: 1000 },
+ values: [10, 50],
+ });
+
+ expect(thresholds).toEqual([100, 500]);
+ });
+
+ const outOfPercentBoundsValues = [-1, 0, 100, 101];
+ it.each(outOfPercentBoundsValues)(
+ 'when values out of 0-100 range are passed, empty result is returned',
+ invalidValue => {
+ thresholds = getValidThresholds({
+ mode: 'percentage',
+ range: { min: 0, max: 1000 },
+ values: [invalidValue],
+ });
+
+ expect(thresholds).toEqual([]);
+ },
+ );
+ });
+ });
+
+ it('calling without passing object parameter returns empty array', () => {
+ thresholds = getValidThresholds();
+
+ expect(thresholds).toEqual([]);
+ });
+ });
});
diff --git a/spec/frontend/monitoring/components/charts/single_stat_spec.js b/spec/frontend/monitoring/components/charts/single_stat_spec.js
index 3783b1eebd2..37712eb3012 100644
--- a/spec/frontend/monitoring/components/charts/single_stat_spec.js
+++ b/spec/frontend/monitoring/components/charts/single_stat_spec.js
@@ -1,71 +1,91 @@
import { shallowMount } from '@vue/test-utils';
+import { GlSingleStat } from '@gitlab/ui/dist/charts';
import SingleStatChart from '~/monitoring/components/charts/single_stat.vue';
import { singleStatGraphData } from '../../graph_data';
describe('Single Stat Chart component', () => {
- let singleStatChart;
+ let wrapper;
- beforeEach(() => {
- singleStatChart = shallowMount(SingleStatChart, {
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(SingleStatChart, {
propsData: {
graphData: singleStatGraphData({}, { unit: 'MB' }),
+ ...props,
},
});
+ };
+
+ const findChart = () => wrapper.find(GlSingleStat);
+
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
- singleStatChart.destroy();
+ wrapper.destroy();
});
describe('computed', () => {
describe('statValue', () => {
it('should interpolate the value and unit props', () => {
- expect(singleStatChart.vm.statValue).toBe('1.00MB');
+ expect(findChart().props('value')).toBe('1.00MB');
});
it('should change the value representation to a percentile one', () => {
- singleStatChart.setProps({
+ createComponent({
graphData: singleStatGraphData({ max_value: 120 }, { value: 91 }),
});
- expect(singleStatChart.vm.statValue).toContain('75.83%');
+ expect(findChart().props('value')).toContain('75.83%');
});
it('should display NaN for non numeric maxValue values', () => {
- singleStatChart.setProps({
+ createComponent({
graphData: singleStatGraphData({ max_value: 'not a number' }),
});
- expect(singleStatChart.vm.statValue).toContain('NaN');
+ expect(findChart().props('value')).toContain('NaN');
});
it('should display NaN for missing query values', () => {
- singleStatChart.setProps({
+ createComponent({
graphData: singleStatGraphData({ max_value: 120 }, { value: 'NaN' }),
});
- expect(singleStatChart.vm.statValue).toContain('NaN');
+ expect(findChart().props('value')).toContain('NaN');
+ });
+
+ it('should not display `unit` when `unit` is undefined', () => {
+ createComponent({
+ graphData: singleStatGraphData({}, { unit: undefined }),
+ });
+
+ expect(findChart().props('value')).not.toContain('undefined');
});
- describe('field attribute', () => {
+ it('should not display `unit` when `unit` is null', () => {
+ createComponent({
+ graphData: singleStatGraphData({}, { unit: null }),
+ });
+
+ expect(findChart().props('value')).not.toContain('null');
+ });
+
+ describe('when a field attribute is set', () => {
it('displays a label value instead of metric value when field attribute is used', () => {
- singleStatChart.setProps({
+ createComponent({
graphData: singleStatGraphData({ field: 'job' }, { isVector: true }),
});
- return singleStatChart.vm.$nextTick(() => {
- expect(singleStatChart.vm.statValue).toContain('prometheus');
- });
+ expect(findChart().props('value')).toContain('prometheus');
});
it('displays No data to display if field attribute is not present', () => {
- singleStatChart.setProps({
+ createComponent({
graphData: singleStatGraphData({ field: 'this-does-not-exist' }),
});
- return singleStatChart.vm.$nextTick(() => {
- expect(singleStatChart.vm.statValue).toContain('No data to display');
- });
+ expect(findChart().props('value')).toContain('No data to display');
});
});
});
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 97386be9e32..6f9a89feb3e 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -12,7 +12,12 @@ import {
import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import { panelTypes, chartHeight } from '~/monitoring/constants';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
-import { deploymentData, mockProjectDir, annotationsData } from '../../mock_data';
+import {
+ deploymentData,
+ mockProjectDir,
+ annotationsData,
+ mockFixedTimeRange,
+} from '../../mock_data';
import { timeSeriesGraphData } from '../../graph_data';
@@ -42,6 +47,7 @@ describe('Time series component', () => {
deploymentData,
annotations: annotationsData,
projectPath: `${TEST_HOST}${mockProjectDir}`,
+ timeRange: mockFixedTimeRange,
...props,
},
stubs: {
@@ -382,6 +388,25 @@ describe('Time series component', () => {
});
describe('chartOptions', () => {
+ describe('x-Axis bounds', () => {
+ it('is set to the time range bounds', () => {
+ expect(getChartOptions().xAxis).toMatchObject({
+ min: mockFixedTimeRange.start,
+ max: mockFixedTimeRange.end,
+ });
+ });
+
+ it('is not set if time range is not set or incorrectly set', () => {
+ wrapper.setProps({
+ timeRange: {},
+ });
+ return wrapper.vm.$nextTick(() => {
+ expect(getChartOptions().xAxis).not.toHaveProperty('min');
+ expect(getChartOptions().xAxis).not.toHaveProperty('max');
+ });
+ });
+ });
+
describe('dataZoom', () => {
it('renders with scroll handle icons', () => {
expect(getChartOptions().dataZoom).toHaveLength(1);
diff --git a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
new file mode 100644
index 00000000000..024b2cbd7f1
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
@@ -0,0 +1,440 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlNewDropdownItem } from '@gitlab/ui';
+import { createStore } from '~/monitoring/stores';
+import { DASHBOARD_PAGE, PANEL_NEW_PAGE } from '~/monitoring/router/constants';
+import { setupAllDashboards, setupStoreWithData } from '../store_utils';
+import { redirectTo } from '~/lib/utils/url_utility';
+import Tracking from '~/tracking';
+import ActionsMenu from '~/monitoring/components/dashboard_actions_menu.vue';
+import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
+import { dashboardActionsMenuProps, dashboardGitResponse } from '../mock_data';
+import * as types from '~/monitoring/stores/mutation_types';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ redirectTo: jest.fn(),
+ queryToObject: jest.fn(),
+}));
+
+describe('Actions menu', () => {
+ const ootbDashboards = [dashboardGitResponse[0], dashboardGitResponse[2]];
+ const customDashboard = dashboardGitResponse[1];
+
+ let store;
+ let wrapper;
+
+ const findAddMetricItem = () => wrapper.find('[data-testid="add-metric-item"]');
+ const findAddPanelItemEnabled = () => wrapper.find('[data-testid="add-panel-item-enabled"]');
+ const findAddPanelItemDisabled = () => wrapper.find('[data-testid="add-panel-item-disabled"]');
+ const findAddMetricModal = () => wrapper.find('[data-testid="add-metric-modal"]');
+ const findAddMetricModalSubmitButton = () =>
+ wrapper.find('[data-testid="add-metric-modal-submit-button"]');
+ const findStarDashboardItem = () => wrapper.find('[data-testid="star-dashboard-item"]');
+ const findEditDashboardItemEnabled = () =>
+ wrapper.find('[data-testid="edit-dashboard-item-enabled"]');
+ const findEditDashboardItemDisabled = () =>
+ wrapper.find('[data-testid="edit-dashboard-item-disabled"]');
+ const findDuplicateDashboardItem = () => wrapper.find('[data-testid="duplicate-dashboard-item"]');
+ const findDuplicateDashboardModal = () =>
+ wrapper.find('[data-testid="duplicate-dashboard-modal"]');
+ const findCreateDashboardItem = () => wrapper.find('[data-testid="create-dashboard-item"]');
+ const findCreateDashboardModal = () => wrapper.find('[data-testid="create-dashboard-modal"]');
+
+ const createShallowWrapper = (props = {}, options = {}) => {
+ wrapper = shallowMount(ActionsMenu, {
+ propsData: { ...dashboardActionsMenuProps, ...props },
+ store,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('add metric item', () => {
+ it('is rendered when custom metrics are available', () => {
+ createShallowWrapper();
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findAddMetricItem().exists()).toBe(true);
+ });
+ });
+
+ it('is not rendered when custom metrics are not available', () => {
+ createShallowWrapper({
+ addingMetricsAvailable: false,
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findAddMetricItem().exists()).toBe(false);
+ });
+ });
+
+ describe('when available', () => {
+ beforeEach(() => {
+ createShallowWrapper();
+ });
+
+ it('modal for custom metrics form is rendered', () => {
+ expect(findAddMetricModal().exists()).toBe(true);
+ expect(findAddMetricModal().attributes().modalid).toBe('addMetric');
+ });
+
+ it('add metric modal submit button exists', () => {
+ expect(findAddMetricModalSubmitButton().exists()).toBe(true);
+ });
+
+ it('renders custom metrics form fields', () => {
+ expect(wrapper.find(CustomMetricsFormFields).exists()).toBe(true);
+ });
+ });
+
+ describe('when not available', () => {
+ beforeEach(() => {
+ createShallowWrapper({ addingMetricsAvailable: false });
+ });
+
+ it('modal for custom metrics form is not rendered', () => {
+ expect(findAddMetricModal().exists()).toBe(false);
+ });
+ });
+
+ describe('adding new metric from modal', () => {
+ let origPage;
+
+ beforeEach(done => {
+ jest.spyOn(Tracking, 'event').mockReturnValue();
+ createShallowWrapper();
+
+ setupStoreWithData(store);
+
+ origPage = document.body.dataset.page;
+ document.body.dataset.page = 'projects:environments:metrics';
+
+ wrapper.vm.$nextTick(done);
+ });
+
+ afterEach(() => {
+ document.body.dataset.page = origPage;
+ });
+
+ it('is tracked', done => {
+ const submitButton = findAddMetricModalSubmitButton().vm;
+
+ wrapper.vm.$nextTick(() => {
+ submitButton.$el.click();
+ wrapper.vm.$nextTick(() => {
+ expect(Tracking.event).toHaveBeenCalledWith(
+ document.body.dataset.page,
+ 'click_button',
+ {
+ label: 'add_new_metric',
+ property: 'modal',
+ value: undefined,
+ },
+ );
+ done();
+ });
+ });
+ });
+ });
+ });
+
+ describe('add panel item', () => {
+ const GlNewDropdownItemStub = {
+ extends: GlNewDropdownItem,
+ props: {
+ to: [String, Object],
+ },
+ };
+
+ let $route;
+
+ beforeEach(() => {
+ $route = { name: DASHBOARD_PAGE, params: { dashboard: 'my_dashboard.yml' } };
+
+ createShallowWrapper(
+ {
+ isOotbDashboard: false,
+ },
+ {
+ mocks: { $route },
+ stubs: { GlNewDropdownItem: GlNewDropdownItemStub },
+ },
+ );
+ });
+
+ it('is disabled for ootb dashboards', () => {
+ createShallowWrapper({
+ isOotbDashboard: true,
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findAddPanelItemDisabled().exists()).toBe(true);
+ });
+ });
+
+ it('is visible for custom dashboards', () => {
+ expect(findAddPanelItemEnabled().exists()).toBe(true);
+ });
+
+ it('renders a link to the new panel page for custom dashboards', () => {
+ expect(findAddPanelItemEnabled().props('to')).toEqual({
+ name: PANEL_NEW_PAGE,
+ params: {
+ dashboard: 'my_dashboard.yml',
+ },
+ });
+ });
+ });
+
+ describe('edit dashboard yml item', () => {
+ beforeEach(() => {
+ createShallowWrapper();
+ });
+
+ describe('when current dashboard is custom', () => {
+ beforeEach(() => {
+ setupAllDashboards(store, customDashboard.path);
+ });
+
+ it('enabled item is rendered and has falsy disabled attribute', () => {
+ expect(findEditDashboardItemEnabled().exists()).toBe(true);
+ expect(findEditDashboardItemEnabled().attributes('disabled')).toBe(undefined);
+ });
+
+ it('enabled item links to their edit path', () => {
+ expect(findEditDashboardItemEnabled().attributes('href')).toBe(
+ customDashboard.project_blob_path,
+ );
+ });
+
+ it('disabled item is not rendered', () => {
+ expect(findEditDashboardItemDisabled().exists()).toBe(false);
+ });
+ });
+
+ describe.each(ootbDashboards)('when current dashboard is OOTB', dashboard => {
+ beforeEach(() => {
+ setupAllDashboards(store, dashboard.path);
+ });
+
+ it('disabled item is rendered and has disabled attribute set on it', () => {
+ expect(findEditDashboardItemDisabled().exists()).toBe(true);
+ expect(findEditDashboardItemDisabled().attributes('disabled')).toBe('');
+ });
+
+ it('enabled item is not rendered', () => {
+ expect(findEditDashboardItemEnabled().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('duplicate dashboard item', () => {
+ beforeEach(() => {
+ createShallowWrapper();
+ });
+
+ describe.each(ootbDashboards)('when current dashboard is OOTB', dashboard => {
+ beforeEach(() => {
+ setupAllDashboards(store, dashboard.path);
+ });
+
+ it('is rendered', () => {
+ expect(findDuplicateDashboardItem().exists()).toBe(true);
+ });
+
+ it('duplicate dashboard modal is rendered', () => {
+ expect(findDuplicateDashboardModal().exists()).toBe(true);
+ });
+
+ it('clicking on item opens up the duplicate dashboard modal', () => {
+ const modalId = 'duplicateDashboard';
+ const modalTrigger = findDuplicateDashboardItem();
+ const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+
+ modalTrigger.trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(rootEmit.mock.calls[0]).toContainEqual(modalId);
+ });
+ });
+ });
+
+ describe('when current dashboard is custom', () => {
+ beforeEach(() => {
+ setupAllDashboards(store, customDashboard.path);
+ });
+
+ it('is not rendered', () => {
+ expect(findDuplicateDashboardItem().exists()).toBe(false);
+ });
+
+ it('duplicate dashboard modal is not rendered', () => {
+ expect(findDuplicateDashboardModal().exists()).toBe(false);
+ });
+ });
+
+ describe('when no dashboard is set', () => {
+ it('is not rendered', () => {
+ expect(findDuplicateDashboardItem().exists()).toBe(false);
+ });
+
+ it('duplicate dashboard modal is not rendered', () => {
+ expect(findDuplicateDashboardModal().exists()).toBe(false);
+ });
+ });
+
+ describe('when a dashboard has been duplicated in the duplicate dashboard modal', () => {
+ beforeEach(() => {
+ store.state.monitoringDashboard.projectPath = 'root/sandbox';
+
+ setupAllDashboards(store, dashboardGitResponse[0].path);
+ });
+
+ it('redirects to the newly created dashboard', () => {
+ delete window.location;
+ window.location = new URL('https://localhost');
+
+ const newDashboard = dashboardGitResponse[1];
+
+ const newDashboardUrl = 'root/sandbox/-/metrics/dashboard.yml';
+ findDuplicateDashboardModal().vm.$emit('dashboardDuplicated', newDashboard);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(redirectTo).toHaveBeenCalled();
+ expect(redirectTo).toHaveBeenCalledWith(newDashboardUrl);
+ });
+ });
+ });
+ });
+
+ describe('star dashboard item', () => {
+ beforeEach(() => {
+ createShallowWrapper();
+ setupAllDashboards(store);
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+ });
+
+ it('is shown', () => {
+ expect(findStarDashboardItem().exists()).toBe(true);
+ });
+
+ it('is not disabled', () => {
+ expect(findStarDashboardItem().attributes('disabled')).toBeFalsy();
+ });
+
+ it('is disabled when starring is taking place', () => {
+ store.commit(`monitoringDashboard/${types.REQUEST_DASHBOARD_STARRING}`);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findStarDashboardItem().exists()).toBe(true);
+ expect(findStarDashboardItem().attributes('disabled')).toBe('true');
+ });
+ });
+
+ it('dispatches a toggle star action on click', () => {
+ findStarDashboardItem().vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/toggleStarredValue',
+ undefined,
+ );
+ });
+ });
+
+ describe('when dashboard is not starred', () => {
+ beforeEach(() => {
+ store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
+ currentDashboard: dashboardGitResponse[0].path,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('item text shows "Star dashboard"', () => {
+ expect(findStarDashboardItem().html()).toMatch(/Star dashboard/);
+ });
+ });
+
+ describe('when dashboard is starred', () => {
+ beforeEach(() => {
+ store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
+ currentDashboard: dashboardGitResponse[1].path,
+ });
+ return wrapper.vm.$nextTick();
+ });
+
+ it('item text shows "Unstar dashboard"', () => {
+ expect(findStarDashboardItem().html()).toMatch(/Unstar dashboard/);
+ });
+ });
+ });
+
+ describe('create dashboard item', () => {
+ beforeEach(() => {
+ createShallowWrapper();
+ });
+
+ it('is rendered by default but it is disabled', () => {
+ expect(findCreateDashboardItem().attributes('disabled')).toBe('true');
+ });
+
+ describe('when project path is set', () => {
+ const mockProjectPath = 'root/sandbox';
+ const mockAddDashboardDocPath = '/doc/add-dashboard';
+
+ beforeEach(() => {
+ store.state.monitoringDashboard.projectPath = mockProjectPath;
+ store.state.monitoringDashboard.addDashboardDocumentationPath = mockAddDashboardDocPath;
+ });
+
+ it('is not disabled', () => {
+ expect(findCreateDashboardItem().attributes('disabled')).toBe(undefined);
+ });
+
+ it('renders a modal for creating a dashboard', () => {
+ expect(findCreateDashboardModal().exists()).toBe(true);
+ });
+
+ it('clicking opens up the modal', () => {
+ const modalId = 'createDashboard';
+ const modalTrigger = findCreateDashboardItem();
+ const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+
+ modalTrigger.trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(rootEmit.mock.calls[0]).toContainEqual(modalId);
+ });
+ });
+
+ it('modal gets passed correct props', () => {
+ expect(findCreateDashboardModal().props('projectPath')).toBe(mockProjectPath);
+ expect(findCreateDashboardModal().props('addDashboardDocumentationPath')).toBe(
+ mockAddDashboardDocPath,
+ );
+ });
+ });
+
+ describe('when project path is not set', () => {
+ beforeEach(() => {
+ store.state.monitoringDashboard.projectPath = null;
+ });
+
+ it('is disabled', () => {
+ expect(findCreateDashboardItem().attributes('disabled')).toBe('true');
+ });
+
+ it('does not render a modal for creating a dashboard', () => {
+ expect(findCreateDashboardModal().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboard_header_spec.js b/spec/frontend/monitoring/components/dashboard_header_spec.js
index 5a1a615c703..5cf24706ebd 100644
--- a/spec/frontend/monitoring/components/dashboard_header_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_header_spec.js
@@ -1,16 +1,23 @@
import { shallowMount } from '@vue/test-utils';
+import { GlNewDropdownItem, GlSearchBoxByType, GlLoadingIcon, GlButton } from '@gitlab/ui';
import { createStore } from '~/monitoring/stores';
+import * as types from '~/monitoring/stores/mutation_types';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import RefreshButton from '~/monitoring/components/refresh_button.vue';
import DashboardHeader from '~/monitoring/components/dashboard_header.vue';
-import DuplicateDashboardModal from '~/monitoring/components/duplicate_dashboard_modal.vue';
-import CreateDashboardModal from '~/monitoring/components/create_dashboard_modal.vue';
-import { setupAllDashboards } from '../store_utils';
+import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
+import ActionsMenu from '~/monitoring/components/dashboard_actions_menu.vue';
+import { setupAllDashboards, setupStoreWithDashboard, setupStoreWithData } from '../store_utils';
import {
+ environmentData,
dashboardGitResponse,
selfMonitoringDashboardGitResponse,
dashboardHeaderProps,
} from '../mock_data';
import { redirectTo } from '~/lib/utils/url_utility';
+const mockProjectPath = 'https://path/to/project';
+
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
queryToObject: jest.fn(),
@@ -21,13 +28,22 @@ describe('Dashboard header', () => {
let store;
let wrapper;
- const findActionsMenu = () => wrapper.find('[data-testid="actions-menu"]');
- const findCreateDashboardMenuItem = () =>
- findActionsMenu().find('[data-testid="action-create-dashboard"]');
- const findCreateDashboardDuplicateItem = () =>
- findActionsMenu().find('[data-testid="action-duplicate-dashboard"]');
- const findDuplicateDashboardModal = () => wrapper.find(DuplicateDashboardModal);
- const findCreateDashboardModal = () => wrapper.find('[data-testid="create-dashboard-modal"]');
+ const findDashboardDropdown = () => wrapper.find(DashboardsDropdown);
+
+ const findEnvsDropdown = () => wrapper.find({ ref: 'monitorEnvironmentsDropdown' });
+ const findEnvsDropdownItems = () => findEnvsDropdown().findAll(GlNewDropdownItem);
+ const findEnvsDropdownSearch = () => findEnvsDropdown().find(GlSearchBoxByType);
+ const findEnvsDropdownSearchMsg = () => wrapper.find({ ref: 'monitorEnvironmentsDropdownMsg' });
+ const findEnvsDropdownLoadingIcon = () => findEnvsDropdown().find(GlLoadingIcon);
+
+ const findDateTimePicker = () => wrapper.find(DateTimePicker);
+ const findRefreshButton = () => wrapper.find(RefreshButton);
+
+ const findActionsMenu = () => wrapper.find(ActionsMenu);
+
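+ // Filters the environments dropdown by committing the search term to the store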
+ const setSearchTerm = searchTerm => {
+ store.commit(`monitoringDashboard/${types.SET_ENVIRONMENTS_FILTER}`, searchTerm);
+ };
const createShallowWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(DashboardHeader, {
@@ -45,139 +61,315 @@ describe('Dashboard header', () => {
wrapper.destroy();
});
- describe('when a dashboard has been duplicated in the duplicate dashboard modal', () => {
+ describe('dashboards dropdown', () => {
beforeEach(() => {
- store.state.monitoringDashboard.projectPath = 'root/sandbox';
+ store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
+ projectPath: mockProjectPath,
+ });
+
+ createShallowWrapper();
});
- /**
- * The duplicate dashboard modal gets called both by a menu item from the
- * dashboards dropdown and by an item from the actions menu.
- *
- * This spec is context agnostic, so it addresses all cases where the
- * duplicate dashboard modal gets called.
- */
- it('redirects to the newly created dashboard', () => {
- delete window.location;
- window.location = new URL('https://localhost');
- const newDashboard = dashboardGitResponse[1];
+ it('shows the dashboard dropdown', () => {
+ expect(findDashboardDropdown().exists()).toBe(true);
+ });
- createShallowWrapper();
+ it('when an out of the box dashboard is selected, encodes dashboard path', () => {
+ findDashboardDropdown().vm.$emit('selectDashboard', {
+ path: '.gitlab/dashboards/dashboard&copy.yml',
+ out_of_the_box_dashboard: true,
+ display_name: 'A display name',
+ });
- const newDashboardUrl = 'root/sandbox/-/metrics/dashboard.yml';
- findDuplicateDashboardModal().vm.$emit('dashboardDuplicated', newDashboard);
+ expect(redirectTo).toHaveBeenCalledWith(
+ `${mockProjectPath}/-/metrics/.gitlab%2Fdashboards%2Fdashboard%26copy.yml`,
+ );
+ });
- return wrapper.vm.$nextTick().then(() => {
- expect(redirectTo).toHaveBeenCalled();
- expect(redirectTo).toHaveBeenCalledWith(newDashboardUrl);
+ it('when a custom dashboard is selected, encodes dashboard display name', () => {
+ findDashboardDropdown().vm.$emit('selectDashboard', {
+ path: '.gitlab/dashboards/file&path.yml',
+ display_name: 'dashboard&copy.yml',
});
+
+ expect(redirectTo).toHaveBeenCalledWith(`${mockProjectPath}/-/metrics/dashboard%26copy.yml`);
});
});
- describe('actions menu', () => {
+ describe('environments dropdown', () => {
beforeEach(() => {
- store.state.monitoringDashboard.projectPath = '';
createShallowWrapper();
});
- it('is rendered if projectPath is set in store', () => {
- store.state.monitoringDashboard.projectPath = 'https://path/to/project';
+ it('shows the environments dropdown', () => {
+ expect(findEnvsDropdown().exists()).toBe(true);
+ });
- return wrapper.vm.$nextTick().then(() => {
- expect(findActionsMenu().exists()).toBe(true);
+ it('renders a search input', () => {
+ expect(findEnvsDropdownSearch().exists()).toBe(true);
+ });
+
+ describe('when environments data is not loaded', () => {
+ beforeEach(() => {
+ setupStoreWithDashboard(store);
+ return wrapper.vm.$nextTick();
+ });
+
+ it('there are no environments listed', () => {
+ expect(findEnvsDropdownItems()).toHaveLength(0);
+ });
+ });
+
+ describe('when environments data is loaded', () => {
+ const currentDashboard = dashboardGitResponse[0].path;
+ const currentEnvironmentName = environmentData[0].name;
+
+ beforeEach(() => {
+ setupStoreWithData(store);
+ store.state.monitoringDashboard.projectPath = mockProjectPath;
+ store.state.monitoringDashboard.currentDashboard = currentDashboard;
+ store.state.monitoringDashboard.currentEnvironmentName = currentEnvironmentName;
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('renders dropdown items with the environment name', () => {
+ const path = `${mockProjectPath}/-/metrics/${encodeURIComponent(currentDashboard)}`;
+
+ findEnvsDropdownItems().wrappers.forEach((itemWrapper, index) => {
+ const { name, id } = environmentData[index];
+ const idParam = encodeURIComponent(id);
+
+ expect(itemWrapper.text()).toBe(name);
+ expect(itemWrapper.attributes('href')).toBe(`${path}?environment=${idParam}`);
+ });
+ });
+
+ it('environments dropdown items can be checked', () => {
+ const items = findEnvsDropdownItems();
+ const checkItems = findEnvsDropdownItems().filter(item => item.props('isCheckItem'));
+
+ expect(items).toHaveLength(checkItems.length);
+ });
+
+ it('checks the currently selected environment', () => {
+ const selectedItems = findEnvsDropdownItems().filter(item => item.props('isChecked'));
+
+ expect(selectedItems).toHaveLength(1);
+ expect(selectedItems.at(0).text()).toBe(currentEnvironmentName);
+ });
+
+ it('filters rendered dropdown items', () => {
+ const searchTerm = 'production';
+ const resultEnvs = environmentData.filter(({ name }) => name.indexOf(searchTerm) !== -1);
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findEnvsDropdownItems()).toHaveLength(resultEnvs.length);
+ });
+ });
+
+ it('does not filter dropdown items if search term is empty string', () => {
+ const searchTerm = '';
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findEnvsDropdownItems()).toHaveLength(environmentData.length);
+ });
+ });
+
+ it("shows error message if search term doesn't match", () => {
+ const searchTerm = 'does-not-exist';
+ setSearchTerm(searchTerm);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findEnvsDropdownSearchMsg().isVisible()).toBe(true);
+ });
+ });
+
+ it('shows loading element when environments fetch is still loading', () => {
+ store.commit(`monitoringDashboard/${types.REQUEST_ENVIRONMENTS_DATA}`);
+
+ return wrapper.vm
+ .$nextTick()
+ .then(() => {
+ expect(findEnvsDropdownLoadingIcon().exists()).toBe(true);
+ })
+ .then(() => {
+ store.commit(
+ `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
+ environmentData,
+ );
+ })
+ .then(() => {
+ expect(findEnvsDropdownLoadingIcon().exists()).toBe(false);
+ });
});
});
+ });
- it('is not rendered if projectPath is not set in store', () => {
- expect(findActionsMenu().exists()).toBe(false);
+ describe('date time picker', () => {
+ beforeEach(() => {
+ createShallowWrapper();
});
- it('contains a modal', () => {
- store.state.monitoringDashboard.projectPath = 'https://path/to/project';
+ it('is rendered', () => {
+ expect(findDateTimePicker().exists()).toBe(true);
+ });
- return wrapper.vm.$nextTick().then(() => {
- expect(findActionsMenu().contains(CreateDashboardModal)).toBe(true);
+ describe('timezone setting', () => {
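+ // Recreates the store (and wrapper) so the dashboardTimezone option passed to createStore takes effect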
+ const setupWithTimezone = value => {
+ store = createStore({ dashboardTimezone: value });
+ createShallowWrapper();
+ };
+
+ describe('local timezone is enabled by default', () => {
+ it('shows the date time picker in local timezone', () => {
+ expect(findDateTimePicker().props('utc')).toBe(false);
+ });
+ });
+
+ describe('when LOCAL timezone is enabled', () => {
+ beforeEach(() => {
+ setupWithTimezone('LOCAL');
+ });
+
+ it('shows the date time picker in local timezone', () => {
+ expect(findDateTimePicker().props('utc')).toBe(false);
+ });
+ });
+
+ describe('when UTC timezone is enabled', () => {
+ beforeEach(() => {
+ setupWithTimezone('UTC');
+ });
+
+ it('shows the date time picker in UTC format', () => {
+ expect(findDateTimePicker().props('utc')).toBe(true);
+ });
});
});
+ });
+
+ describe('refresh button', () => {
+ beforeEach(() => {
+ createShallowWrapper();
+ });
+
+ it('is rendered', () => {
+ expect(findRefreshButton().exists()).toBe(true);
+ });
+ });
+
+ describe('external dashboard link', () => {
+ beforeEach(() => {
+ store.state.monitoringDashboard.externalDashboardUrl = '/mockUrl';
+ createShallowWrapper();
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('shows the link', () => {
+ const externalDashboardButton = wrapper.find('.js-external-dashboard-link');
+
+ expect(externalDashboardButton.exists()).toBe(true);
+ expect(externalDashboardButton.is(GlButton)).toBe(true);
+ expect(externalDashboardButton.text()).toContain('View full dashboard');
+ });
+ });
- const duplicableCases = [
- null, // When no path is specified, it uses the default dashboard path.
+ describe('actions menu', () => {
+ const ootbDashboards = [
dashboardGitResponse[0].path,
- dashboardGitResponse[2].path,
selfMonitoringDashboardGitResponse[0].path,
];
+ const customDashboards = [
+ dashboardGitResponse[1].path,
+ selfMonitoringDashboardGitResponse[1].path,
+ ];
- describe.each(duplicableCases)(
- 'when the selected dashboard can be duplicated',
- dashboardPath => {
- it('contains a "Create New" menu item and a "Duplicate Dashboard" menu item', () => {
- store.state.monitoringDashboard.projectPath = 'https://path/to/project';
- setupAllDashboards(store, dashboardPath);
+ it('is rendered', () => {
+ createShallowWrapper();
- return wrapper.vm.$nextTick().then(() => {
- expect(findCreateDashboardMenuItem().exists()).toBe(true);
- expect(findCreateDashboardDuplicateItem().exists()).toBe(true);
- });
+ expect(findActionsMenu().exists()).toBe(true);
+ });
+
+ describe('adding metrics prop', () => {
+ it.each(ootbDashboards)('gets passed true if current dashboard is OOTB', dashboardPath => {
+ createShallowWrapper({ customMetricsAvailable: true });
+
+ store.state.monitoringDashboard.emptyState = false;
+ setupAllDashboards(store, dashboardPath);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findActionsMenu().props('addingMetricsAvailable')).toBe(true);
});
- },
- );
+ });
- const nonDuplicableCases = [
- dashboardGitResponse[1].path,
- selfMonitoringDashboardGitResponse[1].path,
- ];
+ it.each(customDashboards)(
+ 'gets passed false if current dashboard is custom',
+ dashboardPath => {
+ createShallowWrapper({ customMetricsAvailable: true });
- describe.each(nonDuplicableCases)(
- 'when the selected dashboard cannot be duplicated',
- dashboardPath => {
- it('contains a "Create New" menu item and no "Duplicate Dashboard" menu item', () => {
- store.state.monitoringDashboard.projectPath = 'https://path/to/project';
+ store.state.monitoringDashboard.emptyState = false;
setupAllDashboards(store, dashboardPath);
return wrapper.vm.$nextTick().then(() => {
- expect(findCreateDashboardMenuItem().exists()).toBe(true);
- expect(findCreateDashboardDuplicateItem().exists()).toBe(false);
+ expect(findActionsMenu().props('addingMetricsAvailable')).toBe(false);
});
+ },
+ );
+
+ it('gets passed false if empty state is shown', () => {
+ createShallowWrapper({ customMetricsAvailable: true });
+
+ store.state.monitoringDashboard.emptyState = true;
+ setupAllDashboards(store, ootbDashboards[0]);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findActionsMenu().props('addingMetricsAvailable')).toBe(false);
});
- },
- );
- });
+ });
- describe('actions menu modals', () => {
- const url = 'https://path/to/project';
+ it('gets passed false if custom metrics are not available', () => {
+ createShallowWrapper({ customMetricsAvailable: false });
- beforeEach(() => {
- store.state.monitoringDashboard.projectPath = url;
- setupAllDashboards(store);
+ store.state.monitoringDashboard.emptyState = false;
+ setupAllDashboards(store, ootbDashboards[0]);
- createShallowWrapper();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findActionsMenu().props('addingMetricsAvailable')).toBe(false);
+ });
+ });
});
- it('Clicking on "Create New" opens up a modal', () => {
- const modalId = 'createDashboard';
- const modalTrigger = findCreateDashboardMenuItem();
- const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+ it('custom metrics path gets passed', () => {
+ const path = 'https://path/to/customMetrics';
- modalTrigger.trigger('click');
+ createShallowWrapper({ customMetricsPath: path });
return wrapper.vm.$nextTick().then(() => {
- expect(rootEmit.mock.calls[0]).toContainEqual(modalId);
+ expect(findActionsMenu().props('customMetricsPath')).toBe(path);
});
});
- it('"Create new dashboard" modal contains correct buttons', () => {
- expect(findCreateDashboardModal().props('projectPath')).toBe(url);
+ it('validate query path gets passed', () => {
+ const path = 'https://path/to/validateQuery';
+
+ createShallowWrapper({ validateQueryPath: path });
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findActionsMenu().props('validateQueryPath')).toBe(path);
+ });
});
- it('"Duplicate Dashboard" opens up a modal', () => {
- const modalId = 'duplicateDashboard';
- const modalTrigger = findCreateDashboardDuplicateItem();
- const rootEmit = jest.spyOn(wrapper.vm.$root, '$emit');
+ it('default branch gets passed', () => {
+ const branch = 'branchName';
- modalTrigger.trigger('click');
+ createShallowWrapper({ defaultBranch: branch });
return wrapper.vm.$nextTick().then(() => {
- expect(rootEmit.mock.calls[0]).toContainEqual(modalId);
+ expect(findActionsMenu().props('defaultBranch')).toBe(branch);
});
});
});
diff --git a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
new file mode 100644
index 00000000000..587ddd23d3f
--- /dev/null
+++ b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
@@ -0,0 +1,234 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlCard, GlForm, GlFormTextarea, GlAlert } from '@gitlab/ui';
+import { createStore } from '~/monitoring/stores';
+import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
+import * as types from '~/monitoring/stores/mutation_types';
+import { metricsDashboardResponse } from '../fixture_data';
+import { mockTimeRange } from '../mock_data';
+
+import DashboardPanelBuilder from '~/monitoring/components/dashboard_panel_builder.vue';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+
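+// First panel of the fixture dashboard, used as the mocked panel preview payload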
+const mockPanel = metricsDashboardResponse.dashboard.panel_groups[0].panels[0];
+
+describe('Dashboard panel builder', () => {
+ let store;
+ let wrapper;
+ let mockShowToast;
+
+ const createComponent = (props = {}, options = {}) => {
+ wrapper = shallowMount(DashboardPanelBuilder, {
+ propsData: { ...props },
+ store,
+ stubs: {
+ GlCard,
+ },
+ mocks: {
+ $toast: {
+ show: mockShowToast,
+ },
+ },
+ ...options,
+ });
+ };
+
+ const findForm = () => wrapper.find(GlForm);
+ const findTxtArea = () => findForm().find(GlFormTextarea);
+ const findSubmitBtn = () => findForm().find('[type="submit"]');
+ const findClipboardCopyBtn = () => wrapper.find({ ref: 'clipboardCopyBtn' });
+ const findViewDocumentationBtn = () => wrapper.find({ ref: 'viewDocumentationBtn' });
+ const findOpenRepositoryBtn = () => wrapper.find({ ref: 'openRepositoryBtn' });
+ const findPanel = () => wrapper.find(DashboardPanel);
+ const findTimeRangePicker = () => wrapper.find(DateTimePicker);
+ const findRefreshButton = () => wrapper.find('[data-testid="previewRefreshButton"]');
+
+ beforeEach(() => {
+ mockShowToast = jest.fn();
+ store = createStore();
+ createComponent();
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('is mounted', () => {
+ expect(wrapper.exists()).toBe(true);
+ });
+
+ it('displays an empty dashboard panel', () => {
+ expect(findPanel().exists()).toBe(true);
+ expect(findPanel().props('graphData')).toBe(null);
+ });
+
+ it('does not fetch initial data by default', () => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+
+ describe('yml form', () => {
+ it('form exists and can be submitted', () => {
+ expect(findForm().exists()).toBe(true);
+ expect(findSubmitBtn().exists()).toBe(true);
+ expect(findSubmitBtn().is('[disabled]')).toBe(false);
+ });
+
+ it('form has a text area with a default value', () => {
+ expect(findTxtArea().exists()).toBe(true);
+
+ const value = findTxtArea().attributes('value');
+
+ // Panel definition should contain a title and a type
+ expect(value).toContain('title:');
+ expect(value).toContain('type:');
+ });
+
+ it('"copy to clipboard" button works', () => {
+ findClipboardCopyBtn().vm.$emit('click');
+ const clipboardText = findClipboardCopyBtn().attributes('data-clipboard-text');
+
+ expect(clipboardText).toContain('title:');
+ expect(clipboardText).toContain('type:');
+
+ expect(mockShowToast).toHaveBeenCalledTimes(1);
+ });
+
+ it('on submit fetches a panel preview', () => {
+ findForm().vm.$emit('submit', new Event('submit'));
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/fetchPanelPreview',
+ expect.stringContaining('title:'),
+ );
+ });
+ });
+
+ describe('when form is submitted', () => {
+ beforeEach(() => {
+ store.commit(`monitoringDashboard/${types.REQUEST_PANEL_PREVIEW}`, 'mock yml content');
+ return wrapper.vm.$nextTick();
+ });
+
+ it('submit button is disabled', () => {
+ expect(findSubmitBtn().is('[disabled]')).toBe(true);
+ });
+ });
+ });
+
+ describe('time range picker', () => {
+ it('is visible by default', () => {
+ expect(findTimeRangePicker().exists()).toBe(true);
+ });
+
+ it('when changed does not trigger data fetch unless preview panel button is clicked', () => {
+ // mimic initial state where SET_PANEL_PREVIEW_IS_SHOWN is set to false
+ store.commit(`monitoringDashboard/${types.SET_PANEL_PREVIEW_IS_SHOWN}`, false);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+ });
+
+ it('when changed triggers data fetch if preview panel button is clicked', () => {
+ findForm().vm.$emit('submit', new Event('submit'));
+
+ store.commit(`monitoringDashboard/${types.SET_PANEL_PREVIEW_TIME_RANGE}`, mockTimeRange);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(store.dispatch).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('refresh', () => {
+ it('is visible by default', () => {
+ expect(findRefreshButton().exists()).toBe(true);
+ });
+
+ it('when clicked does not trigger data fetch unless preview panel button is clicked', () => {
+ // mimic initial state where SET_PANEL_PREVIEW_IS_SHOWN is set to false
+ store.commit(`monitoringDashboard/${types.SET_PANEL_PREVIEW_IS_SHOWN}`, false);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+ });
+
+ it('when clicked triggers data fetch if preview panel button is clicked', () => {
+ // mimic state where preview is visible. SET_PANEL_PREVIEW_IS_SHOWN is set to true
+ store.commit(`monitoringDashboard/${types.SET_PANEL_PREVIEW_IS_SHOWN}`, true);
+
+ findRefreshButton().vm.$emit('click');
+
+ return wrapper.vm.$nextTick(() => {
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/fetchPanelPreviewMetrics',
+ undefined,
+ );
+ });
+ });
+ });
+
+ describe('instructions card', () => {
+ const mockDocsPath = '/docs-path';
+ const mockProjectPath = '/project-path';
+
+ beforeEach(() => {
+ store.state.monitoringDashboard.addDashboardDocumentationPath = mockDocsPath;
+ store.state.monitoringDashboard.projectPath = mockProjectPath;
+
+ createComponent();
+ });
+
+ it('displays next actions for the user', () => {
+ expect(findViewDocumentationBtn().exists()).toBe(true);
+ expect(findViewDocumentationBtn().attributes('href')).toBe(mockDocsPath);
+
+ expect(findOpenRepositoryBtn().exists()).toBe(true);
+ expect(findOpenRepositoryBtn().attributes('href')).toBe(mockProjectPath);
+ });
+ });
+
+ describe('when there is an error', () => {
+ const mockError = 'an error occurred!';
+
+ beforeEach(() => {
+ store.commit(`monitoringDashboard/${types.RECEIVE_PANEL_PREVIEW_FAILURE}`, mockError);
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays an alert', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(true);
+ expect(wrapper.find(GlAlert).text()).toBe(mockError);
+ });
+
+ it('displays an empty dashboard panel', () => {
+ expect(findPanel().props('graphData')).toBe(null);
+ });
+
+ it('changing time range should not refetch data', () => {
+ store.commit(`monitoringDashboard/${types.SET_PANEL_PREVIEW_TIME_RANGE}`, mockTimeRange);
+
+ return wrapper.vm.$nextTick(() => {
+ expect(store.dispatch).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('when panel data is available', () => {
+ beforeEach(() => {
+ store.commit(`monitoringDashboard/${types.RECEIVE_PANEL_PREVIEW_SUCCESS}`, mockPanel);
+ return wrapper.vm.$nextTick();
+ });
+
+ it('displays no alert', () => {
+ expect(wrapper.find(GlAlert).exists()).toBe(false);
+ });
+
+ it('displays panel with data', () => {
+ const { title, type } = wrapper.find(DashboardPanel).props('graphData');
+
+ expect(title).toBe(mockPanel.title);
+ expect(type).toBe(mockPanel.type);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/components/dashboard_panel_spec.js b/spec/frontend/monitoring/components/dashboard_panel_spec.js
index 693818aa55a..fb96bcc042f 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_spec.js
@@ -2,23 +2,23 @@ import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import { setTestTimeout } from 'helpers/timeout';
+import { GlNewDropdownItem as GlDropdownItem } from '@gitlab/ui';
import invalidUrl from '~/lib/utils/invalid_url';
import axios from '~/lib/utils/axios_utils';
-import { GlNewDropdownItem as GlDropdownItem } from '@gitlab/ui';
import AlertWidget from '~/monitoring/components/alert_widget.vue';
import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
import {
+ mockAlert,
mockLogsHref,
mockLogsPath,
mockNamespace,
mockNamespacedData,
mockTimeRange,
- graphDataPrometheusQueryRangeMultiTrack,
barMockData,
} from '../mock_data';
import { dashboardProps, graphData, graphDataEmpty } from '../fixture_data';
-import { anomalyGraphData, singleStatGraphData } from '../graph_data';
+import { anomalyGraphData, singleStatGraphData, heatmapGraphData } from '../graph_data';
import { panelTypes } from '~/monitoring/constants';
@@ -56,9 +56,10 @@ describe('Dashboard Panel', () => {
const findCtxMenu = () => wrapper.find({ ref: 'contextualMenu' });
const findMenuItems = () => wrapper.findAll(GlDropdownItem);
const findMenuItemByText = text => findMenuItems().filter(i => i.text() === text);
+ const findAlertsWidget = () => wrapper.find(AlertWidget);
- const createWrapper = (props, options) => {
- wrapper = shallowMount(DashboardPanel, {
+ const createWrapper = (props, { mountFn = shallowMount, ...options } = {}) => {
+ wrapper = mountFn(DashboardPanel, {
propsData: {
graphData,
settingsPath: dashboardProps.settingsPath,
@@ -79,6 +80,9 @@ describe('Dashboard Panel', () => {
});
};
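+ // Controls the mocked metricsSavedToDb getter (used by the panel alerts and runbook url specs)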
+ const setMetricsSavedToDb = val =>
+ monitoringDashboard.getters.metricsSavedToDb.mockReturnValue(val);
+
beforeEach(() => {
setTestTimeout(1000);
@@ -235,7 +239,7 @@ describe('Dashboard Panel', () => {
${anomalyGraphData()} | ${MonitorAnomalyChart} | ${false}
${dataWithType(panelTypes.COLUMN)} | ${MonitorColumnChart} | ${false}
${dataWithType(panelTypes.STACKED_COLUMN)} | ${MonitorStackedColumnChart} | ${false}
- ${graphDataPrometheusQueryRangeMultiTrack} | ${MonitorHeatmapChart} | ${false}
+ ${heatmapGraphData()} | ${MonitorHeatmapChart} | ${false}
${barMockData} | ${MonitorBarChart} | ${false}
`('when $data.type data is provided', ({ data, component, hasCtxMenu }) => {
const attrs = { attr1: 'attr1Value', attr2: 'attr2Value' };
@@ -255,6 +259,35 @@ describe('Dashboard Panel', () => {
});
});
});
+
+ describe('computed', () => {
+ describe('fixedCurrentTimeRange', () => {
+ it('returns fixed time for valid time range', () => {
+ state.timeRange = mockTimeRange;
+ return wrapper.vm.$nextTick(() => {
+ expect(findTimeChart().props('timeRange')).toEqual(
+ expect.objectContaining({
+ start: expect.any(String),
+ end: expect.any(String),
+ }),
+ );
+ });
+ });
+
+ it.each`
+ input | output
+ ${''} | ${{}}
+ ${undefined} | ${{}}
+ ${null} | ${{}}
+ ${'2020-12-03'} | ${{}}
+ `('returns $output for invalid input like $input', ({ input, output }) => {
+ state.timeRange = input;
+ return wrapper.vm.$nextTick(() => {
+ expect(findTimeChart().props('timeRange')).toEqual(output);
+ });
+ });
+ });
+ });
});
describe('Edit custom metric dropdown item', () => {
@@ -444,7 +477,7 @@ describe('Dashboard Panel', () => {
describe('csvText', () => {
it('converts metrics data from json to csv', () => {
- const header = `timestamp,${graphData.y_label}`;
+ const header = `timestamp,"${graphData.y_label} > ${graphData.metrics[0].label}"`;
const data = graphData.metrics[0].result[0].values;
const firstRow = `${data[0][0]},${data[0][1]}`;
const secondRow = `${data[1][0]},${data[1][1]}`;
@@ -523,7 +556,7 @@ describe('Dashboard Panel', () => {
});
it('displays a heatmap in local timezone', () => {
- createWrapper({ graphData: graphDataPrometheusQueryRangeMultiTrack });
+ createWrapper({ graphData: heatmapGraphData() });
expect(wrapper.find(MonitorHeatmapChart).props('timezone')).toBe('LOCAL');
});
@@ -538,7 +571,7 @@ describe('Dashboard Panel', () => {
});
it('displays a heatmap with UTC', () => {
- createWrapper({ graphData: graphDataPrometheusQueryRangeMultiTrack });
+ createWrapper({ graphData: heatmapGraphData() });
expect(wrapper.find(MonitorHeatmapChart).props('timezone')).toBe('UTC');
});
});
@@ -573,10 +606,6 @@ describe('Dashboard Panel', () => {
});
describe('panel alerts', () => {
- const setMetricsSavedToDb = val =>
- monitoringDashboard.getters.metricsSavedToDb.mockReturnValue(val);
- const findAlertsWidget = () => wrapper.find(AlertWidget);
-
beforeEach(() => {
mockGetterReturnValue('metricsSavedToDb', []);
@@ -702,4 +731,60 @@ describe('Dashboard Panel', () => {
expect(findManageLinksItem().exists()).toBe(false);
});
});
+
+ describe('Runbook url', () => {
+ const findRunbookLinks = () => wrapper.findAll('[data-testid="runbookLink"]');
+ const { metricId } = graphData.metrics[0];
+ const { alert_path: alertPath } = mockAlert;
+
+ const mockRunbookAlert = {
+ ...mockAlert,
+ metricId,
+ };
+
+ beforeEach(() => {
+ mockGetterReturnValue('metricsSavedToDb', []);
+ });
+
+ it('does not show a runbook link when alerts are not present', () => {
+ createWrapper();
+
+ expect(findRunbookLinks().length).toBe(0);
+ });
+
+ describe('when alerts are present', () => {
+ beforeEach(() => {
+ setMetricsSavedToDb([metricId]);
+
+ createWrapper({
+ alertsEndpoint: '/endpoint',
+ prometheusAlertsAvailable: true,
+ });
+ });
+
+ it('does not show a runbook link when a runbook is not set', async () => {
+ findAlertsWidget().vm.$emit('setAlerts', alertPath, {
+ ...mockRunbookAlert,
+ runbookUrl: '',
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(findRunbookLinks().length).toBe(0);
+ });
+
+ it('shows a runbook link when a runbook is set', async () => {
+ findAlertsWidget().vm.$emit('setAlerts', alertPath, mockRunbookAlert);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findRunbookLinks().length).toBe(1);
+ expect(
+ findRunbookLinks()
+ .at(0)
+ .attributes('href'),
+ ).toBe(invalidUrl);
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 4b7f7a9ddb3..f37d95317ab 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -1,19 +1,14 @@
import { shallowMount, mount } from '@vue/test-utils';
-import Tracking from '~/tracking';
-import { ESC_KEY, ESC_KEY_IE11 } from '~/lib/utils/keys';
-import { GlModal, GlDropdownItem, GlDeprecatedButton, GlIcon } from '@gitlab/ui';
-import { objectToQuery } from '~/lib/utils/url_utility';
import VueDraggable from 'vuedraggable';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
+import { ESC_KEY } from '~/lib/utils/keys';
+import { objectToQuery } from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
import { dashboardEmptyStates, metricStates } from '~/monitoring/constants';
import Dashboard from '~/monitoring/components/dashboard.vue';
import DashboardHeader from '~/monitoring/components/dashboard_header.vue';
-import RefreshButton from '~/monitoring/components/refresh_button.vue';
-import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
-import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
-import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
import EmptyState from '~/monitoring/components/empty_state.vue';
import GroupEmptyState from '~/monitoring/components/group_empty_state.vue';
import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
@@ -29,14 +24,13 @@ import {
setupStoreWithDataForPanelCount,
setupStoreWithLinks,
} from '../store_utils';
-import { environmentData, dashboardGitResponse, storeVariables } from '../mock_data';
+import { dashboardGitResponse, storeVariables } from '../mock_data';
import {
metricsDashboardViewModel,
metricsDashboardPanelCount,
dashboardProps,
} from '../fixture_data';
-import createFlash from '~/flash';
-import { TEST_HOST } from 'helpers/test_constants';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
@@ -45,14 +39,6 @@ describe('Dashboard', () => {
let wrapper;
let mock;
- const findDashboardHeader = () => wrapper.find(DashboardHeader);
- const findEnvironmentsDropdown = () =>
- findDashboardHeader().find({ ref: 'monitorEnvironmentsDropdown' });
- const findAllEnvironmentsDropdownItems = () => findEnvironmentsDropdown().findAll(GlDropdownItem);
- const setSearchTerm = searchTerm => {
- store.commit(`monitoringDashboard/${types.SET_ENVIRONMENTS_FILTER}`, searchTerm);
- };
-
const createShallowWrapper = (props = {}, options = {}) => {
wrapper = shallowMount(Dashboard, {
propsData: { ...dashboardProps, ...props },
@@ -90,28 +76,6 @@ describe('Dashboard', () => {
}
});
- describe('no metrics are available yet', () => {
- beforeEach(() => {
- createShallowWrapper();
- });
-
- it('shows the environment selector', () => {
- expect(findEnvironmentsDropdown().exists()).toBe(true);
- });
- });
-
- describe('no data found', () => {
- beforeEach(() => {
- createShallowWrapper();
-
- return wrapper.vm.$nextTick();
- });
-
- it('shows the environment selector dropdown', () => {
- expect(findEnvironmentsDropdown().exists()).toBe(true);
- });
- });
-
describe('request information to the server', () => {
it('calls to set time range and fetch data', () => {
createShallowWrapper({ hasMetrics: true });
@@ -149,17 +113,14 @@ describe('Dashboard', () => {
});
it('fetches the metrics data with proper time window', () => {
- jest.spyOn(store, 'dispatch');
-
createMountedWrapper({ hasMetrics: true });
- store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
-
return wrapper.vm.$nextTick().then(() => {
- expect(store.dispatch).toHaveBeenCalled();
+ expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/fetchData', undefined);
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'monitoringDashboard/setTimeRange',
+ expect.objectContaining({ duration: { seconds: 28800 } }),
+ );
});
});
});
@@ -427,37 +388,6 @@ describe('Dashboard', () => {
);
});
});
-
- describe('when custom dashboard is selected', () => {
- const windowLocation = window.location;
- const findDashboardDropdown = () => wrapper.find(DashboardHeader).find(DashboardsDropdown);
-
- beforeEach(() => {
- store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
- projectPath: TEST_HOST,
- });
-
- delete window.location;
- window.location = { ...windowLocation, assign: jest.fn() };
- createMountedWrapper();
-
- return wrapper.vm.$nextTick();
- });
-
- afterEach(() => {
- window.location = windowLocation;
- });
-
- it('encodes dashboard param', () => {
- findDashboardDropdown().vm.$emit('selectDashboard', {
- path: '.gitlab/dashboards/dashboard&copy.yml',
- display_name: 'dashboard&copy.yml',
- });
- expect(window.location.assign).toHaveBeenCalledWith(
- `${TEST_HOST}/-/metrics/dashboard%26copy.yml`,
- );
- });
- });
});
describe('when all panels in the first group are loading', () => {
@@ -500,21 +430,6 @@ describe('Dashboard', () => {
return wrapper.vm.$nextTick();
});
- it('renders the environments dropdown with a number of environments', () => {
- expect(findAllEnvironmentsDropdownItems().length).toEqual(environmentData.length);
-
- findAllEnvironmentsDropdownItems().wrappers.forEach((itemWrapper, index) => {
- const anchorEl = itemWrapper.find('a');
- if (anchorEl.exists()) {
- const href = anchorEl.attributes('href');
- const currentDashboard = encodeURIComponent(dashboardGitResponse[0].path);
- const environmentId = encodeURIComponent(environmentData[index].id);
- const url = `${TEST_HOST}/-/metrics/${currentDashboard}?environment=${environmentId}`;
- expect(href).toBe(url);
- }
- });
- });
-
it('it does not show loading icons in any group', () => {
setupStoreWithData(store);
@@ -524,127 +439,6 @@ describe('Dashboard', () => {
});
});
});
-
- // Note: This test is not working, .active does not show the active environment
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('renders the environments dropdown with a single active element', () => {
- const activeItem = findAllEnvironmentsDropdownItems().wrappers.filter(itemWrapper =>
- itemWrapper.find('.active').exists(),
- );
-
- expect(activeItem.length).toBe(1);
- });
- });
-
- describe('star dashboards', () => {
- const findToggleStar = () => wrapper.find(DashboardHeader).find({ ref: 'toggleStarBtn' });
- const findToggleStarIcon = () => findToggleStar().find(GlIcon);
-
- beforeEach(() => {
- createShallowWrapper();
- setupAllDashboards(store);
- });
-
- it('toggle star button is shown', () => {
- expect(findToggleStar().exists()).toBe(true);
- expect(findToggleStar().props('disabled')).toBe(false);
- });
-
- it('toggle star button is disabled when starring is taking place', () => {
- store.commit(`monitoringDashboard/${types.REQUEST_DASHBOARD_STARRING}`);
-
- return wrapper.vm.$nextTick(() => {
- expect(findToggleStar().exists()).toBe(true);
- expect(findToggleStar().props('disabled')).toBe(true);
- });
- });
-
- describe('when the dashboard list is loaded', () => {
- // Tooltip element should wrap directly
- const getToggleTooltip = () => findToggleStar().element.parentElement.getAttribute('title');
-
- beforeEach(() => {
- setupAllDashboards(store);
- jest.spyOn(store, 'dispatch');
- });
-
- it('dispatches a toggle star action', () => {
- findToggleStar().vm.$emit('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(store.dispatch).toHaveBeenCalledWith(
- 'monitoringDashboard/toggleStarredValue',
- undefined,
- );
- });
- });
-
- describe('when dashboard is not starred', () => {
- beforeEach(() => {
- store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
- currentDashboard: dashboardGitResponse[0].path,
- });
- return wrapper.vm.$nextTick();
- });
-
- it('toggle star button shows "Star dashboard"', () => {
- expect(getToggleTooltip()).toBe('Star dashboard');
- });
-
- it('toggle star button shows an unstarred state', () => {
- expect(findToggleStarIcon().attributes('name')).toBe('star-o');
- });
- });
-
- describe('when dashboard is starred', () => {
- beforeEach(() => {
- store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
- currentDashboard: dashboardGitResponse[1].path,
- });
- return wrapper.vm.$nextTick();
- });
-
- it('toggle star button shows "Star dashboard"', () => {
- expect(getToggleTooltip()).toBe('Unstar dashboard');
- });
-
- it('toggle star button shows a starred state', () => {
- expect(findToggleStarIcon().attributes('name')).toBe('star');
- });
- });
- });
- });
-
- it('hides the environments dropdown list when there is no environments', () => {
- createMountedWrapper({ hasMetrics: true });
-
- setupStoreWithDashboard(store);
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findAllEnvironmentsDropdownItems()).toHaveLength(0);
- });
- });
-
- it('renders the datetimepicker dropdown', () => {
- createMountedWrapper({ hasMetrics: true });
-
- setupStoreWithData(store);
-
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.find(DateTimePicker).exists()).toBe(true);
- });
- });
-
- it('renders the refresh dashboard button', () => {
- createMountedWrapper({ hasMetrics: true });
-
- setupStoreWithData(store);
-
- return wrapper.vm.$nextTick().then(() => {
- const refreshBtn = wrapper.find(DashboardHeader).find(RefreshButton);
-
- expect(refreshBtn.exists()).toBe(true);
- });
});
describe('variables section', () => {
@@ -772,15 +566,6 @@ describe('Dashboard', () => {
undefined,
);
});
-
- it('restores dashboard from full screen by typing the Escape key on IE11', () => {
- mockKeyup(ESC_KEY_IE11);
-
- expect(store.dispatch).toHaveBeenCalledWith(
- `monitoringDashboard/clearExpandedPanel`,
- undefined,
- );
- });
});
});
@@ -811,100 +596,6 @@ describe('Dashboard', () => {
});
});
- describe('searchable environments dropdown', () => {
- beforeEach(() => {
- createMountedWrapper({ hasMetrics: true }, { attachToDocument: true });
-
- setupStoreWithData(store);
-
- return wrapper.vm.$nextTick();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders a search input', () => {
- expect(
- wrapper
- .find(DashboardHeader)
- .find({ ref: 'monitorEnvironmentsDropdownSearch' })
- .exists(),
- ).toBe(true);
- });
-
- it('renders dropdown items', () => {
- findAllEnvironmentsDropdownItems().wrappers.forEach((itemWrapper, index) => {
- const anchorEl = itemWrapper.find('a');
- if (anchorEl.exists()) {
- expect(anchorEl.text()).toBe(environmentData[index].name);
- }
- });
- });
-
- it('filters rendered dropdown items', () => {
- const searchTerm = 'production';
- const resultEnvs = environmentData.filter(({ name }) => name.indexOf(searchTerm) !== -1);
- setSearchTerm(searchTerm);
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findAllEnvironmentsDropdownItems().length).toEqual(resultEnvs.length);
- });
- });
-
- it('does not filter dropdown items if search term is empty string', () => {
- const searchTerm = '';
- setSearchTerm(searchTerm);
-
- return wrapper.vm.$nextTick(() => {
- expect(findAllEnvironmentsDropdownItems().length).toEqual(environmentData.length);
- });
- });
-
- it("shows error message if search term doesn't match", () => {
- const searchTerm = 'does-not-exist';
- setSearchTerm(searchTerm);
-
- return wrapper.vm.$nextTick(() => {
- expect(
- wrapper
- .find(DashboardHeader)
- .find({ ref: 'monitorEnvironmentsDropdownMsg' })
- .isVisible(),
- ).toBe(true);
- });
- });
-
- it('shows loading element when environments fetch is still loading', () => {
- store.commit(`monitoringDashboard/${types.REQUEST_ENVIRONMENTS_DATA}`);
-
- return wrapper.vm
- .$nextTick()
- .then(() => {
- expect(
- wrapper
- .find(DashboardHeader)
- .find({ ref: 'monitorEnvironmentsDropdownLoading' })
- .exists(),
- ).toBe(true);
- })
- .then(() => {
- store.commit(
- `monitoringDashboard/${types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS}`,
- environmentData,
- );
- })
- .then(() => {
- expect(
- wrapper
- .find(DashboardHeader)
- .find({ ref: 'monitorEnvironmentsDropdownLoading' })
- .exists(),
- ).toBe(false);
- });
- });
- });
-
describe('drag and drop function', () => {
const findDraggables = () => wrapper.findAll(VueDraggable);
const findEnabledDraggables = () => findDraggables().filter(f => !f.attributes('disabled'));
@@ -998,57 +689,6 @@ describe('Dashboard', () => {
});
});
- describe('dashboard timezone', () => {
- const setupWithTimezone = value => {
- store = createStore({ dashboardTimezone: value });
- setupStoreWithData(store);
- createShallowWrapper({ hasMetrics: true });
- return wrapper.vm.$nextTick;
- };
-
- describe('local timezone is enabled by default', () => {
- beforeEach(() => {
- return setupWithTimezone();
- });
-
- it('shows the data time picker in local timezone', () => {
- expect(
- findDashboardHeader()
- .find(DateTimePicker)
- .props('utc'),
- ).toBe(false);
- });
- });
-
- describe('when LOCAL timezone is enabled', () => {
- beforeEach(() => {
- return setupWithTimezone('LOCAL');
- });
-
- it('shows the data time picker in local timezone', () => {
- expect(
- findDashboardHeader()
- .find(DateTimePicker)
- .props('utc'),
- ).toBe(false);
- });
- });
-
- describe('when UTC timezone is enabled', () => {
- beforeEach(() => {
- return setupWithTimezone('UTC');
- });
-
- it('shows the data time picker in UTC format', () => {
- expect(
- findDashboardHeader()
- .find(DateTimePicker)
- .props('utc'),
- ).toBe(true);
- });
- });
- });
-
describe('cluster health', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true, showHeader: false });
@@ -1068,36 +708,9 @@ describe('Dashboard', () => {
});
});
- describe('dashboard edit link', () => {
- const findEditLink = () => wrapper.find('.js-edit-link');
-
- beforeEach(() => {
- createShallowWrapper({ hasMetrics: true });
-
- setupAllDashboards(store);
- return wrapper.vm.$nextTick();
- });
-
- it('is not present for the default dashboard', () => {
- expect(findEditLink().exists()).toBe(false);
- });
-
- it('is present for a custom dashboard, and links to its edit_path', () => {
- const dashboard = dashboardGitResponse[1];
- store.commit(`monitoringDashboard/${types.SET_INITIAL_STATE}`, {
- currentDashboard: dashboard.path,
- });
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findEditLink().exists()).toBe(true);
- expect(findEditLink().attributes('href')).toBe(dashboard.project_blob_path);
- });
- });
- });
-
describe('document title', () => {
const originalTitle = 'Original Title';
- const defaultDashboardName = dashboardGitResponse[0].display_name;
+ const overviewDashboardName = dashboardGitResponse[0].display_name;
beforeEach(() => {
document.title = originalTitle;
@@ -1108,11 +721,11 @@ describe('Dashboard', () => {
document.title = '';
});
- it('is prepended with default dashboard name by default', () => {
+ it('is prepended with the overview dashboard name by default', () => {
setupAllDashboards(store);
return wrapper.vm.$nextTick().then(() => {
- expect(document.title.startsWith(`${defaultDashboardName} · `)).toBe(true);
+ expect(document.title.startsWith(`${overviewDashboardName} · `)).toBe(true);
});
});
@@ -1127,11 +740,11 @@ describe('Dashboard', () => {
});
});
- it('is prepended with default dashboard name is path is not known', () => {
+ it('is prepended with the overview dashboard name if path is not known', () => {
setupAllDashboards(store, 'unknown/path');
return wrapper.vm.$nextTick().then(() => {
- expect(document.title.startsWith(`${defaultDashboardName} · `)).toBe(true);
+ expect(document.title.startsWith(`${overviewDashboardName} · `)).toBe(true);
});
});
@@ -1151,41 +764,6 @@ describe('Dashboard', () => {
});
});
- describe('Dashboard dropdown', () => {
- beforeEach(() => {
- createMountedWrapper({ hasMetrics: true });
- setupAllDashboards(store);
- return wrapper.vm.$nextTick();
- });
-
- it('shows the dashboard dropdown', () => {
- const dashboardDropdown = wrapper.find(DashboardsDropdown);
-
- expect(dashboardDropdown.exists()).toBe(true);
- });
- });
-
- describe('external dashboard link', () => {
- beforeEach(() => {
- createMountedWrapper({
- hasMetrics: true,
- showPanels: false,
- showTimeWindowDropdown: false,
- externalDashboardUrl: '/mockUrl',
- });
-
- return wrapper.vm.$nextTick();
- });
-
- it('shows the link', () => {
- const externalDashboardButton = wrapper.find('.js-external-dashboard-link');
-
- expect(externalDashboardButton.exists()).toBe(true);
- expect(externalDashboardButton.is(GlDeprecatedButton)).toBe(true);
- expect(externalDashboardButton.text()).toContain('View full dashboard');
- });
- });
-
describe('Clipboard text in panels', () => {
const currentDashboard = dashboardGitResponse[1].path;
const panelIndex = 1; // skip expanded panel
@@ -1243,74 +821,4 @@ describe('Dashboard', () => {
expect(dashboardPanel.exists()).toBe(true);
});
});
-
- describe('add custom metrics', () => {
- const findAddMetricButton = () => wrapper.find(DashboardHeader).find({ ref: 'addMetricBtn' });
-
- describe('when not available', () => {
- beforeEach(() => {
- createShallowWrapper({
- hasMetrics: true,
- customMetricsPath: '/endpoint',
- });
- });
- it('does not render add button on the dashboard', () => {
- expect(findAddMetricButton().exists()).toBe(false);
- });
- });
-
- describe('when available', () => {
- let origPage;
- beforeEach(done => {
- jest.spyOn(Tracking, 'event').mockReturnValue();
- createShallowWrapper({
- hasMetrics: true,
- customMetricsPath: '/endpoint',
- customMetricsAvailable: true,
- });
- setupStoreWithData(store);
-
- origPage = document.body.dataset.page;
- document.body.dataset.page = 'projects:environments:metrics';
-
- wrapper.vm.$nextTick(done);
- });
- afterEach(() => {
- document.body.dataset.page = origPage;
- });
-
- it('renders add button on the dashboard', () => {
- expect(findAddMetricButton()).toBeDefined();
- });
-
- it('uses modal for custom metrics form', () => {
- expect(wrapper.find(GlModal).exists()).toBe(true);
- expect(wrapper.find(GlModal).attributes().modalid).toBe('addMetric');
- });
- it('adding new metric is tracked', done => {
- const submitButton = wrapper
- .find(DashboardHeader)
- .find({ ref: 'submitCustomMetricsFormBtn' }).vm;
- wrapper.vm.$nextTick(() => {
- submitButton.$el.click();
- wrapper.vm.$nextTick(() => {
- expect(Tracking.event).toHaveBeenCalledWith(
- document.body.dataset.page,
- 'click_button',
- {
- label: 'add_new_metric',
- property: 'modal',
- value: undefined,
- },
- );
- done();
- });
- });
- });
-
- it('renders custom metrics form fields', () => {
- expect(wrapper.find(CustomMetricsFormFields).exists()).toBe(true);
- });
- });
- });
});
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index 276e20bae6a..c4630bde32f 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import {
queryToObject,
redirectTo,
diff --git a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
index d09fcc92ee7..89adbad386f 100644
--- a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
+++ b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
@@ -1,12 +1,11 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdownItem, GlIcon } from '@gitlab/ui';
+import { GlNewDropdownItem, GlIcon } from '@gitlab/ui';
import DashboardsDropdown from '~/monitoring/components/dashboards_dropdown.vue';
-import { dashboardGitResponse, selfMonitoringDashboardGitResponse } from '../mock_data';
+import { dashboardGitResponse } from '../mock_data';
const defaultBranch = 'master';
-const modalId = 'duplicateDashboardModalId';
const starredDashboards = dashboardGitResponse.filter(({ starred }) => starred);
const notStarredDashboards = dashboardGitResponse.filter(({ starred }) => !starred);
@@ -17,20 +16,16 @@ describe('DashboardsDropdown', () => {
function createComponent(props, opts = {}) {
const storeOpts = {
- methods: {
- duplicateSystemDashboard: jest.fn(),
- },
computed: {
allDashboards: () => mockDashboards,
selectedDashboard: () => mockSelectedDashboard,
},
};
- return shallowMount(DashboardsDropdown, {
+ wrapper = shallowMount(DashboardsDropdown, {
propsData: {
...props,
defaultBranch,
- modalId,
},
sync: false,
...storeOpts,
@@ -38,8 +33,8 @@ describe('DashboardsDropdown', () => {
});
}
- const findItems = () => wrapper.findAll(GlDropdownItem);
- const findItemAt = i => wrapper.findAll(GlDropdownItem).at(i);
+ const findItems = () => wrapper.findAll(GlNewDropdownItem);
+ const findItemAt = i => wrapper.findAll(GlNewDropdownItem).at(i);
const findSearchInput = () => wrapper.find({ ref: 'monitorDashboardsDropdownSearch' });
const findNoItemsMsg = () => wrapper.find({ ref: 'monitorDashboardsDropdownMsg' });
const findStarredListDivider = () => wrapper.find({ ref: 'starredListDivider' });
@@ -52,7 +47,7 @@ describe('DashboardsDropdown', () => {
describe('when it receives dashboards data', () => {
beforeEach(() => {
- wrapper = createComponent();
+ createComponent();
});
it('displays an item for each dashboard', () => {
@@ -78,7 +73,7 @@ describe('DashboardsDropdown', () => {
});
it('filters dropdown items when searched for item exists in the list', () => {
- const searchTerm = 'Default';
+ const searchTerm = 'Overview';
setSearchTerm(searchTerm);
return wrapper.vm.$nextTick().then(() => {
@@ -96,10 +91,22 @@ describe('DashboardsDropdown', () => {
});
});
+ describe('when a dashboard is selected', () => {
+ beforeEach(() => {
+ [mockSelectedDashboard] = starredDashboards;
+ createComponent();
+ });
+
+ it('dashboard item is selected', () => {
+ expect(findItemAt(0).props('isChecked')).toBe(true);
+ expect(findItemAt(1).props('isChecked')).toBe(false);
+ });
+ });
+
describe('when the dashboard is missing a display name', () => {
beforeEach(() => {
mockDashboards = dashboardGitResponse.map(d => ({ ...d, display_name: undefined }));
- wrapper = createComponent();
+ createComponent();
});
it('displays items with the dashboard path, with starred dashboards first', () => {
@@ -112,7 +119,7 @@ describe('DashboardsDropdown', () => {
describe('when it receives starred dashboards', () => {
beforeEach(() => {
mockDashboards = starredDashboards;
- wrapper = createComponent();
+ createComponent();
});
it('displays an item for each dashboard', () => {
@@ -133,7 +140,7 @@ describe('DashboardsDropdown', () => {
describe('when it receives only not-starred dashboards', () => {
beforeEach(() => {
mockDashboards = notStarredDashboards;
- wrapper = createComponent();
+ createComponent();
});
it('displays an item for each dashboard', () => {
@@ -150,90 +157,9 @@ describe('DashboardsDropdown', () => {
});
});
- const duplicableCases = [
- dashboardGitResponse[0],
- dashboardGitResponse[2],
- selfMonitoringDashboardGitResponse[0],
- ];
-
- describe.each(duplicableCases)('when the selected dashboard can be duplicated', dashboard => {
- let duplicateDashboardAction;
- let modalDirective;
-
- beforeEach(() => {
- mockSelectedDashboard = dashboard;
- modalDirective = jest.fn();
- duplicateDashboardAction = jest.fn().mockResolvedValue();
-
- wrapper = createComponent(
- {},
- {
- directives: {
- GlModal: modalDirective,
- },
- methods: {
- // Mock vuex actions
- duplicateSystemDashboard: duplicateDashboardAction,
- },
- },
- );
- });
-
- it('displays a dropdown item for each dashboard', () => {
- expect(findItems().length).toEqual(dashboardGitResponse.length + 1);
- });
-
- it('displays one "duplicate dashboard" dropdown item with a directive attached', () => {
- const item = wrapper.findAll('[data-testid="duplicateDashboardItem"]');
-
- expect(item.length).toBe(1);
- });
-
- it('"duplicate dashboard" dropdown item directive works', () => {
- const item = wrapper.find('[data-testid="duplicateDashboardItem"]');
-
- item.trigger('click');
-
- return wrapper.vm.$nextTick().then(() => {
- expect(modalDirective).toHaveBeenCalled();
- });
- });
-
- it('id is correct, as the value of modal directive binding matches modal id', () => {
- expect(modalDirective).toHaveBeenCalledTimes(1);
-
- // Binding's second argument contains the modal id
- expect(modalDirective.mock.calls[0][1]).toEqual(
- expect.objectContaining({
- value: modalId,
- }),
- );
- });
- });
-
- const nonDuplicableCases = [dashboardGitResponse[1], selfMonitoringDashboardGitResponse[1]];
-
- describe.each(nonDuplicableCases)(
- 'when the selected dashboard can not be duplicated',
- dashboard => {
- beforeEach(() => {
- mockSelectedDashboard = dashboard;
-
- wrapper = createComponent();
- });
-
- it('displays a dropdown list item for each dashboard, but no list item for "duplicate dashboard"', () => {
- const item = wrapper.findAll('[data-testid="duplicateDashboardItem"]');
-
- expect(findItems()).toHaveLength(dashboardGitResponse.length);
- expect(item.length).toBe(0);
- });
- },
- );
-
describe('when a dashboard gets selected by the user', () => {
beforeEach(() => {
- wrapper = createComponent();
+ createComponent();
findItemAt(1).vm.$emit('click');
});
diff --git a/spec/frontend/monitoring/components/embeds/metric_embed_spec.js b/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
index 4e7fee81d66..74f265930b1 100644
--- a/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
+++ b/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
@@ -1,10 +1,10 @@
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
-import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
import { TEST_HOST } from 'helpers/test_constants';
+import { setHTMLFixture } from 'helpers/fixtures';
+import DashboardPanel from '~/monitoring/components/dashboard_panel.vue';
import MetricEmbed from '~/monitoring/components/embeds/metric_embed.vue';
import { groups, initialState, metricsData, metricsWithData } from './mock_data';
-import { setHTMLFixture } from 'helpers/fixtures';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/monitoring/components/graph_group_spec.js b/spec/frontend/monitoring/components/graph_group_spec.js
index 81f5d90c310..86e2523f708 100644
--- a/spec/frontend/monitoring/components/graph_group_spec.js
+++ b/spec/frontend/monitoring/components/graph_group_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import GraphGroup from '~/monitoring/components/graph_group.vue';
import { GlLoadingIcon, GlIcon } from '@gitlab/ui';
+import GraphGroup from '~/monitoring/components/graph_group.vue';
describe('Graph group component', () => {
let wrapper;
diff --git a/spec/frontend/monitoring/components/group_empty_state_spec.js b/spec/frontend/monitoring/components/group_empty_state_spec.js
index e8ef8192067..90bd6f67196 100644
--- a/spec/frontend/monitoring/components/group_empty_state_spec.js
+++ b/spec/frontend/monitoring/components/group_empty_state_spec.js
@@ -24,7 +24,7 @@ describe('GroupEmptyState', () => {
'FOO STATE', // does not fail with unknown states
];
- test.each(supportedStates)('Renders an empty state for %s', selectedState => {
+ it.each(supportedStates)('Renders an empty state for %s', selectedState => {
const wrapper = createComponent({ selectedState });
expect(wrapper.element).toMatchSnapshot();
diff --git a/spec/frontend/monitoring/components/refresh_button_spec.js b/spec/frontend/monitoring/components/refresh_button_spec.js
index 29615638453..a9b8295f38e 100644
--- a/spec/frontend/monitoring/components/refresh_button_spec.js
+++ b/spec/frontend/monitoring/components/refresh_button_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
-import { createStore } from '~/monitoring/stores';
+import Visibility from 'visibilityjs';
import { GlNewDropdown, GlNewDropdownItem, GlButton } from '@gitlab/ui';
-
+import { createStore } from '~/monitoring/stores';
import RefreshButton from '~/monitoring/components/refresh_button.vue';
describe('RefreshButton', () => {
@@ -10,8 +10,8 @@ describe('RefreshButton', () => {
let dispatch;
let documentHidden;
- const createWrapper = () => {
- wrapper = shallowMount(RefreshButton, { store });
+ const createWrapper = (options = {}) => {
+ wrapper = shallowMount(RefreshButton, { store, ...options });
};
const findRefreshBtn = () => wrapper.find(GlButton);
@@ -31,14 +31,8 @@ describe('RefreshButton', () => {
jest.spyOn(store, 'dispatch').mockResolvedValue();
dispatch = store.dispatch;
- // Document can be mock hidden by overriding the `hidden` property
documentHidden = false;
- Object.defineProperty(document, 'hidden', {
- configurable: true,
- get() {
- return documentHidden;
- },
- });
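+    // Page visibility is mocked via visibilityjs instead of redefining document.hidden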
+ jest.spyOn(Visibility, 'hidden').mockImplementation(() => documentHidden);
createWrapper();
});
@@ -57,6 +51,20 @@ describe('RefreshButton', () => {
expect(findDropdown().props('text')).toBe('Off');
});
+ describe('when feature flag disable_metric_dashboard_refresh_rate is on', () => {
+ beforeEach(() => {
+ createWrapper({
+ provide: {
+ glFeatures: { disableMetricDashboardRefreshRate: true },
+ },
+ });
+ });
+
+ it('refresh rate is not available', () => {
+ expect(findDropdown().exists()).toBe(false);
+ });
+ });
+
describe('refresh rate options', () => {
it('presents multiple options', () => {
expect(findOptions().length).toBeGreaterThan(1);
diff --git a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
index cc384aef231..788f3abf617 100644
--- a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
+++ b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import DropdownField from '~/monitoring/components/variables/dropdown_field.vue';
describe('Custom variable component', () => {
@@ -23,8 +23,8 @@ describe('Custom variable component', () => {
});
};
- const findDropdown = () => wrapper.find(GlDropdown);
- const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDropdown = () => wrapper.find(GlDeprecatedDropdown);
+ const findDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
it('renders dropdown element when all necessary props are passed', () => {
createShallowWrapper();
diff --git a/spec/frontend/monitoring/csv_export_spec.js b/spec/frontend/monitoring/csv_export_spec.js
new file mode 100644
index 00000000000..eb2a6e40243
--- /dev/null
+++ b/spec/frontend/monitoring/csv_export_spec.js
@@ -0,0 +1,126 @@
+import { timeSeriesGraphData } from './graph_data';
+import { graphDataToCsv } from '~/monitoring/csv_export';
+
+describe('monitoring export_csv', () => {
+ describe('graphDataToCsv', () => {
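+    // The expected CSV uses CRLF line endings and ends with a trailing newline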
+    const expectCsvToMatchLines = (csv, lines) => expect(csv).toEqual(`${lines.join('\r\n')}\r\n`);
+
+ it('should return a csv with 0 metrics', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 0 });
+
+ expect(graphDataToCsv(data)).toEqual('');
+ });
+
+ it('should return a csv with 1 metric with no data', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 1 });
+
+ // When state is NO_DATA, result is null
+ data.metrics[0].result = null;
+
+ expect(graphDataToCsv(data)).toEqual('');
+ });
+
+ it('should return a csv with 1 metric', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 1 });
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > Metric 1"`,
+ '2015-07-01T20:10:50.000Z,1',
+ '2015-07-01T20:12:50.000Z,2',
+ '2015-07-01T20:14:50.000Z,3',
+ ]);
+ });
+
+ it('should return a csv with multiple metrics and one with no data', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 2 });
+
+ // When state is NO_DATA, result is null
+ data.metrics[0].result = null;
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > Metric 2"`,
+ '2015-07-01T20:10:50.000Z,1',
+ '2015-07-01T20:12:50.000Z,2',
+ '2015-07-01T20:14:50.000Z,3',
+ ]);
+ });
+
+ it('should return a csv when not all metrics have the same timestamps', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 3 });
+
+ // Add an "odd" timestamp that is not in the dataset
+ Object.assign(data.metrics[2].result[0], {
+ value: ['2016-01-01T00:00:00.000Z', 9],
+ values: [['2016-01-01T00:00:00.000Z', 9]],
+ });
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > Metric 1","Y Axis > Metric 2","Y Axis > Metric 3"`,
+ '2015-07-01T20:10:50.000Z,1,1,',
+ '2015-07-01T20:12:50.000Z,2,2,',
+ '2015-07-01T20:14:50.000Z,3,3,',
+ '2016-01-01T00:00:00.000Z,,,9',
+ ]);
+ });
+
+ it('should escape double quotes in metric labels with two double quotes ("")', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 1 });
+
+ data.metrics[0].label = 'My "quoted" metric';
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > My ""quoted"" metric"`,
+ '2015-07-01T20:10:50.000Z,1',
+ '2015-07-01T20:12:50.000Z,2',
+ '2015-07-01T20:14:50.000Z,3',
+ ]);
+ });
+
+ it('should return a csv with multiple metrics', () => {
+ const data = timeSeriesGraphData({}, { metricCount: 3 });
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > Metric 1","Y Axis > Metric 2","Y Axis > Metric 3"`,
+ '2015-07-01T20:10:50.000Z,1,1,1',
+ '2015-07-01T20:12:50.000Z,2,2,2',
+ '2015-07-01T20:14:50.000Z,3,3,3',
+ ]);
+ });
+
+ it('should return a csv with 1 metric and multiple series with labels', () => {
+ const data = timeSeriesGraphData({}, { isMultiSeries: true });
+
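+      // Each series becomes its own column, so labeled series can repeat a header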
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > Metric 1","Y Axis > Metric 1"`,
+ '2015-07-01T20:10:50.000Z,1,4',
+ '2015-07-01T20:12:50.000Z,2,5',
+ '2015-07-01T20:14:50.000Z,3,6',
+ ]);
+ });
+
+ it('should return a csv with 1 metric and multiple series', () => {
+ const data = timeSeriesGraphData({}, { isMultiSeries: true, withLabels: false });
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > __name__: up, job: prometheus, instance: localhost:9090","Y Axis > __name__: up, job: node, instance: localhost:9091"`,
+ '2015-07-01T20:10:50.000Z,1,4',
+ '2015-07-01T20:12:50.000Z,2,5',
+ '2015-07-01T20:14:50.000Z,3,6',
+ ]);
+ });
+
+ it('should return a csv with multiple metrics and multiple series', () => {
+ const data = timeSeriesGraphData(
+ {},
+ { metricCount: 3, isMultiSeries: true, withLabels: false },
+ );
+
+ expectCsvToMatchLines(graphDataToCsv(data), [
+ `timestamp,"Y Axis > __name__: up, job: prometheus, instance: localhost:9090","Y Axis > __name__: up, job: node, instance: localhost:9091","Y Axis > __name__: up, job: prometheus, instance: localhost:9090","Y Axis > __name__: up, job: node, instance: localhost:9091","Y Axis > __name__: up, job: prometheus, instance: localhost:9090","Y Axis > __name__: up, job: node, instance: localhost:9091"`,
+ '2015-07-01T20:10:50.000Z,1,4,1,4,1,4',
+ '2015-07-01T20:12:50.000Z,2,5,2,5,2,5',
+ '2015-07-01T20:14:50.000Z,3,6,3,6,3,6',
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/fixture_data.js b/spec/frontend/monitoring/fixture_data.js
index 97edf7bda74..30040d3f89f 100644
--- a/spec/frontend/monitoring/fixture_data.js
+++ b/spec/frontend/monitoring/fixture_data.js
@@ -29,36 +29,12 @@ const datasetState = stateAndPropsFromDataset(
// https://gitlab.com/gitlab-org/gitlab/-/issues/229256
export const dashboardProps = {
...datasetState.dataProps,
- addDashboardDocumentationPath: 'https://path/to/docs',
alertsEndpoint: null,
};
export const metricsDashboardViewModel = mapToDashboardViewModel(metricsDashboardPayload);
export const metricsDashboardPanelCount = 22;
-export const metricResultStatus = {
- // First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
- metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
- data: {
- resultType: 'matrix',
- result: metricsResult,
- },
-};
-export const metricResultPods = {
- // Second metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
- metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
- data: {
- resultType: 'matrix',
- result: metricsResult,
- },
-};
-export const metricResultEmpty = {
- metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
- data: {
- resultType: 'matrix',
- result: [],
- },
-};
// Graph data
diff --git a/spec/frontend/monitoring/graph_data.js b/spec/frontend/monitoring/graph_data.js
index e1b95723f3d..f85351e55d7 100644
--- a/spec/frontend/monitoring/graph_data.js
+++ b/spec/frontend/monitoring/graph_data.js
@@ -1,10 +1,38 @@
import { mapPanelToViewModel, normalizeQueryResponseData } from '~/monitoring/stores/utils';
import { panelTypes, metricStates } from '~/monitoring/constants';
-const initTime = 1435781451.781;
+const initTime = 1435781450; // "Wed, 01 Jul 2015 20:10:50 GMT"
+const intervalSeconds = 120;
const makeValue = val => [initTime, val];
-const makeValues = vals => vals.map((val, i) => [initTime + 15 * i, val]);
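+// Values are spaced `intervalSeconds` apart, starting at `initTime`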
+const makeValues = vals => vals.map((val, i) => [initTime + intervalSeconds * i, val]);
+
+// Raw Prometheus Responses
+
+export const prometheusMatrixMultiResult = ({
+ values1 = ['1', '2', '3'],
+ values2 = ['4', '5', '6'],
+} = {}) => ({
+ resultType: 'matrix',
+ result: [
+ {
+ metric: {
+ __name__: 'up',
+ job: 'prometheus',
+ instance: 'localhost:9090',
+ },
+ values: makeValues(values1),
+ },
+ {
+ metric: {
+ __name__: 'up',
+ job: 'node',
+ instance: 'localhost:9091',
+ },
+ values: makeValues(values2),
+ },
+ ],
+});
// Normalized Prometheus Responses
@@ -82,7 +110,7 @@ const matrixMultiResult = ({ values1 = ['1', '2', '3'], values2 = ['4', '5', '6'
* @param {Object} dataOptions.isMultiSeries
*/
export const timeSeriesGraphData = (panelOptions = {}, dataOptions = {}) => {
- const { metricCount = 1, isMultiSeries = false } = dataOptions;
+ const { metricCount = 1, isMultiSeries = false, withLabels = true } = dataOptions;
return mapPanelToViewModel({
title: 'Time Series Panel',
@@ -90,7 +118,7 @@ export const timeSeriesGraphData = (panelOptions = {}, dataOptions = {}) => {
x_label: 'X Axis',
y_label: 'Y Axis',
metrics: Array.from(Array(metricCount), (_, i) => ({
- label: `Metric ${i + 1}`,
+ label: withLabels ? `Metric ${i + 1}` : undefined,
state: metricStates.OK,
result: isMultiSeries ? matrixMultiResult() : matrixSingleResult(),
})),
@@ -162,3 +190,59 @@ export const anomalyGraphData = (panelOptions = {}, dataOptions = {}) => {
...panelOptions,
});
};
+
+/**
+ * Generate mock graph data for heatmaps according to options
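+ *
+ * @param {Object} panelOptions - Panel options as in YML.
+ * @param {Object} dataOptions - Data options, such as the metric count.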
+ */
+export const heatmapGraphData = (panelOptions = {}, dataOptions = {}) => {
+ const { metricCount = 1 } = dataOptions;
+
+ return mapPanelToViewModel({
+ title: 'Heatmap Panel',
+ type: panelTypes.HEATMAP,
+ x_label: 'X Axis',
+ y_label: 'Y Axis',
+ metrics: Array.from(Array(metricCount), (_, i) => ({
+ label: `Metric ${i + 1}`,
+ state: metricStates.OK,
+ result: matrixMultiResult(),
+ })),
+ ...panelOptions,
+ });
+};
+
+/**
+ * Generate gauge chart mock graph data according to options
+ *
+ * @param {Object} panelOptions - Panel options as in YML.
+ *
+ */
+export const gaugeChartGraphData = (panelOptions = {}) => {
+ const {
+ minValue = 100,
+ maxValue = 1000,
+ split = 20,
+ thresholds = {
+ mode: 'absolute',
+ values: [500, 800],
+ },
+ format = 'kilobytes',
+ } = panelOptions;
+
+ return mapPanelToViewModel({
+ title: 'Gauge Chart Panel',
+ type: panelTypes.GAUGE_CHART,
+ min_value: minValue,
+ max_value: maxValue,
+ split,
+ thresholds,
+ format,
+ metrics: [
+ {
+ label: `Metric`,
+ state: metricStates.OK,
+ result: matrixSingleResult(),
+ },
+ ],
+ });
+};
diff --git a/spec/frontend/monitoring/mock_data.js b/spec/frontend/monitoring/mock_data.js
index 49ad33402c6..28a7dd1af4f 100644
--- a/spec/frontend/monitoring/mock_data.js
+++ b/spec/frontend/monitoring/mock_data.js
@@ -1,3 +1,4 @@
+import invalidUrl from '~/lib/utils/invalid_url';
// This import path needs to be relative for now because this mock data is used in
// Karma specs too, where the helpers/test_constants alias can not be resolved
import { TEST_HOST } from '../helpers/test_constants';
@@ -170,7 +171,7 @@ export const environmentData = [
export const dashboardGitResponse = [
{
default: true,
- display_name: 'Default',
+ display_name: 'Overview',
can_edit: false,
system_dashboard: true,
out_of_the_box_dashboard: true,
@@ -209,7 +210,7 @@ export const selfMonitoringDashboardGitResponse = [
default: true,
display_name: 'Default',
can_edit: false,
- system_dashboard: false,
+ system_dashboard: true,
out_of_the_box_dashboard: true,
project_blob_path: null,
path: 'config/prometheus/self_monitoring_default.yml',
@@ -244,83 +245,6 @@ export const metricsResult = [
},
];
-export const graphDataPrometheusQueryRangeMultiTrack = {
- title: 'Super Chart A3',
- type: 'heatmap',
- weight: 3,
- x_label: 'Status Code',
- y_label: 'Time',
- metrics: [
- {
- metricId: '1_metric_b',
- id: 'response_metrics_nginx_ingress_throughput_status_code',
- query_range:
- 'sum(rate(nginx_upstream_responses_total{upstream=~"%{kube_namespace}-%{ci_environment_slug}-.*"}[60m])) by (status_code)',
- unit: 'req / sec',
- label: 'Status Code',
- prometheus_endpoint_path:
- '/root/rails_nodb/environments/3/prometheus/api/v1/query_range?query=sum%28rate%28nginx_upstream_responses_total%7Bupstream%3D~%22%25%7Bkube_namespace%7D-%25%7Bci_environment_slug%7D-.%2A%22%7D%5B2m%5D%29%29+by+%28status_code%29',
- result: [
- {
- metric: { status_code: '1xx' },
- values: [
- ['2019-08-30T15:00:00.000Z', 0],
- ['2019-08-30T16:00:00.000Z', 2],
- ['2019-08-30T17:00:00.000Z', 0],
- ['2019-08-30T18:00:00.000Z', 0],
- ['2019-08-30T19:00:00.000Z', 0],
- ['2019-08-30T20:00:00.000Z', 3],
- ],
- },
- {
- metric: { status_code: '2xx' },
- values: [
- ['2019-08-30T15:00:00.000Z', 1],
- ['2019-08-30T16:00:00.000Z', 3],
- ['2019-08-30T17:00:00.000Z', 6],
- ['2019-08-30T18:00:00.000Z', 10],
- ['2019-08-30T19:00:00.000Z', 8],
- ['2019-08-30T20:00:00.000Z', 6],
- ],
- },
- {
- metric: { status_code: '3xx' },
- values: [
- ['2019-08-30T15:00:00.000Z', 1],
- ['2019-08-30T16:00:00.000Z', 2],
- ['2019-08-30T17:00:00.000Z', 3],
- ['2019-08-30T18:00:00.000Z', 3],
- ['2019-08-30T19:00:00.000Z', 2],
- ['2019-08-30T20:00:00.000Z', 1],
- ],
- },
- {
- metric: { status_code: '4xx' },
- values: [
- ['2019-08-30T15:00:00.000Z', 2],
- ['2019-08-30T16:00:00.000Z', 0],
- ['2019-08-30T17:00:00.000Z', 0],
- ['2019-08-30T18:00:00.000Z', 2],
- ['2019-08-30T19:00:00.000Z', 0],
- ['2019-08-30T20:00:00.000Z', 2],
- ],
- },
- {
- metric: { status_code: '5xx' },
- values: [
- ['2019-08-30T15:00:00.000Z', 0],
- ['2019-08-30T16:00:00.000Z', 1],
- ['2019-08-30T17:00:00.000Z', 0],
- ['2019-08-30T18:00:00.000Z', 0],
- ['2019-08-30T19:00:00.000Z', 0],
- ['2019-08-30T20:00:00.000Z', 2],
- ],
- },
- ],
- },
- ],
-};
-
export const stackedColumnMockedData = {
title: 'memories',
type: 'stacked-column',
@@ -420,6 +344,11 @@ export const mockNamespaces = [`${baseNamespace}/1`, `${baseNamespace}/2`];
export const mockTimeRange = { duration: { seconds: 120 } };
+export const mockFixedTimeRange = {
+ start: '2020-06-17T19:59:08.659Z',
+ end: '2020-07-17T19:59:08.659Z',
+};
+
export const mockNamespacedData = {
mockDeploymentData: ['mockDeploymentData'],
mockProjectPath: '/mockProjectPath',
@@ -688,10 +617,28 @@ export const storeVariables = [
export const dashboardHeaderProps = {
defaultBranch: 'master',
- addDashboardDocumentationPath: 'https://path/to/docs',
isRearrangingPanels: false,
selectedTimeRange: {
start: '2020-01-01T00:00:00.000Z',
end: '2020-01-01T01:00:00.000Z',
},
};
+
+export const dashboardActionsMenuProps = {
+ defaultBranch: 'master',
+ addingMetricsAvailable: true,
+ customMetricsPath: 'https://path/to/customMetrics',
+ validateQueryPath: 'https://path/to/validateQuery',
+ isOotbDashboard: true,
+};
+
+export const mockAlert = {
+ alert_path: 'alert_path',
+ id: 8,
+ metricId: 'mock_metric_id',
+ operator: '>',
+ query: 'testQuery',
+ runbookUrl: invalidUrl,
+ threshold: 5,
+ title: 'alert title',
+};
diff --git a/spec/frontend/monitoring/pages/panel_new_page_spec.js b/spec/frontend/monitoring/pages/panel_new_page_spec.js
new file mode 100644
index 00000000000..83365b754d9
--- /dev/null
+++ b/spec/frontend/monitoring/pages/panel_new_page_spec.js
@@ -0,0 +1,98 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import { DASHBOARD_PAGE, PANEL_NEW_PAGE } from '~/monitoring/router/constants';
+import { createStore } from '~/monitoring/stores';
+import DashboardPanelBuilder from '~/monitoring/components/dashboard_panel_builder.vue';
+
+import PanelNewPage from '~/monitoring/pages/panel_new_page.vue';
+
+const dashboard = 'dashboard.yml';
+
+// Button stub that can accept `to` as router links do
+// https://bootstrap-vue.org/docs/components/button#comp-ref-b-button-props
+const GlButtonStub = {
+ extends: GlButton,
+ props: {
+ to: [String, Object],
+ },
+};
+
+describe('monitoring/pages/panel_new_page', () => {
+ let store;
+ let wrapper;
+ let $route;
+ let $router;
+
+ const mountComponent = (propsData = {}, route) => {
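+    // Defaults to the "new panel" route for the mock dashboard unless a route is given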
+ $route = route ?? { name: PANEL_NEW_PAGE, params: { dashboard } };
+ $router = {
+ push: jest.fn(),
+ };
+
+ wrapper = shallowMount(PanelNewPage, {
+ propsData,
+ store,
+ stubs: {
+ GlButton: GlButtonStub,
+ },
+ mocks: {
+ $router,
+ $route,
+ },
+ });
+ };
+
+ const findBackButton = () => wrapper.find(GlButtonStub);
+ const findPanelBuilder = () => wrapper.find(DashboardPanelBuilder);
+
+ beforeEach(() => {
+ store = createStore();
+ mountComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('back to dashboard button', () => {
+ it('is rendered', () => {
+ expect(findBackButton().exists()).toBe(true);
+ expect(findBackButton().props('icon')).toBe('go-back');
+ });
+
+ it('links back to the dashboard', () => {
+ expect(findBackButton().props('to')).toEqual({
+ name: DASHBOARD_PAGE,
+ params: { dashboard },
+ });
+ });
+
+ it('links back to the dashboard while preserving query params', () => {
+ $route = {
+ name: PANEL_NEW_PAGE,
+ params: { dashboard },
+ query: { another: 'param' },
+ };
+
+ mountComponent({}, $route);
+
+ expect(findBackButton().props('to')).toEqual({
+ name: DASHBOARD_PAGE,
+ params: { dashboard },
+ query: { another: 'param' },
+ });
+ });
+ });
+
+ describe('dashboard panel builder', () => {
+ it('is rendered', () => {
+ expect(findPanelBuilder().exists()).toBe(true);
+ });
+ });
+
+ describe('page routing', () => {
+ it('route is not updated by default', () => {
+ expect($router.push).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/requests/index_spec.js b/spec/frontend/monitoring/requests/index_spec.js
new file mode 100644
index 00000000000..a91c209875a
--- /dev/null
+++ b/spec/frontend/monitoring/requests/index_spec.js
@@ -0,0 +1,149 @@
+import MockAdapter from 'axios-mock-adapter';
+import { backoffMockImplementation } from 'jest/helpers/backoff_helper';
+import axios from '~/lib/utils/axios_utils';
+import statusCodes from '~/lib/utils/http_status';
+import * as commonUtils from '~/lib/utils/common_utils';
+import { metricsDashboardResponse } from '../fixture_data';
+import { getDashboard, getPrometheusQueryData } from '~/monitoring/requests';
+
+describe('monitoring metrics_requests', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
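+    // Mock backOff so retried requests resolve without real backoff delays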
+ jest.spyOn(commonUtils, 'backOff').mockImplementation(backoffMockImplementation);
+ });
+
+ afterEach(() => {
+ mock.reset();
+
+ commonUtils.backOff.mockReset();
+ });
+
+ describe('getDashboard', () => {
+ const response = metricsDashboardResponse;
+ const dashboardEndpoint = '/dashboard';
+ const params = {
+ start_time: 'start_time',
+ end_time: 'end_time',
+ };
+
+ it('returns a dashboard response', () => {
+ mock.onGet(dashboardEndpoint).reply(statusCodes.OK, response);
+
+ return getDashboard(dashboardEndpoint, params).then(data => {
+ expect(data).toEqual(metricsDashboardResponse);
+ });
+ });
+
+ it('returns a dashboard response after retrying twice', () => {
+ mock.onGet(dashboardEndpoint).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(dashboardEndpoint).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(dashboardEndpoint).reply(statusCodes.OK, response);
+
+ return getDashboard(dashboardEndpoint, params).then(data => {
+ expect(data).toEqual(metricsDashboardResponse);
+ expect(mock.history.get).toHaveLength(3);
+ });
+ });
+
+ it('rejects after getting an error', () => {
+ mock.onGet(dashboardEndpoint).reply(500);
+
+ return getDashboard(dashboardEndpoint, params).catch(error => {
+ expect(error).toEqual(expect.any(Error));
+ expect(mock.history.get).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('getPrometheusQueryData', () => {
+ const response = {
+ status: 'success',
+ data: {
+ resultType: 'matrix',
+ result: [],
+ },
+ };
+ const prometheusEndpoint = '/query_range';
+ const params = {
+ start_time: 'start_time',
+ end_time: 'end_time',
+ };
+
+    it('returns data from a prometheus query', () => {
+ mock.onGet(prometheusEndpoint).reply(statusCodes.OK, response);
+
+ return getPrometheusQueryData(prometheusEndpoint, params).then(data => {
+ expect(data).toEqual(response.data);
+ });
+ });
+
+    it('returns data from a prometheus query after retrying twice', () => {
+ // Mock multiple attempts while the cache is filling up
+ mock.onGet(prometheusEndpoint).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpoint).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpoint).reply(statusCodes.OK, response); // 3rd attempt
+
+ return getPrometheusQueryData(prometheusEndpoint, params).then(data => {
+ expect(data).toEqual(response.data);
+ expect(mock.history.get).toHaveLength(3);
+ });
+ });
+
+ it('rejects after getting an HTTP 500 error', () => {
+ mock.onGet(prometheusEndpoint).reply(500, {
+ status: 'error',
+        error: 'An error occurred',
+ });
+
+ return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ expect(error).toEqual(new Error('Request failed with status code 500'));
+ });
+ });
+
+ it('rejects after retrying twice and getting an HTTP 401 error', () => {
+ // Mock multiple attempts while the cache is filling up and fails
+ mock.onGet(prometheusEndpoint).reply(statusCodes.UNAUTHORIZED, {
+ status: 'error',
+        error: 'An error occurred',
+ });
+
+ return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ expect(error).toEqual(new Error('Request failed with status code 401'));
+ });
+ });
+
+ it('rejects after retrying twice and getting an HTTP 500 error', () => {
+ // Mock multiple attempts while the cache is filling up and fails
+ mock.onGet(prometheusEndpoint).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpoint).replyOnce(statusCodes.NO_CONTENT);
+ mock.onGet(prometheusEndpoint).reply(500, {
+ status: 'error',
+        error: 'An error occurred',
+ }); // 3rd attempt
+
+ return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ expect(error).toEqual(new Error('Request failed with status code 500'));
+ expect(mock.history.get).toHaveLength(3);
+ });
+ });
+
+ test.each`
+ code | reason
+ ${statusCodes.BAD_REQUEST} | ${'Parameters are missing or incorrect'}
+ ${statusCodes.UNPROCESSABLE_ENTITY} | ${"Expression can't be executed"}
+ ${statusCodes.SERVICE_UNAVAILABLE} | ${'Query timed out or aborted'}
+ `('rejects with details: "$reason" after getting an HTTP $code error', ({ code, reason }) => {
+ mock.onGet(prometheusEndpoint).reply(code, {
+ status: 'error',
+ error: reason,
+ });
+
+ return getPrometheusQueryData(prometheusEndpoint, params).catch(error => {
+ expect(error).toEqual(new Error(reason));
+ expect(mock.history.get).toHaveLength(1);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/monitoring/router_spec.js b/spec/frontend/monitoring/router_spec.js
index 5b8f4b3c83e..8b97c8ed125 100644
--- a/spec/frontend/monitoring/router_spec.js
+++ b/spec/frontend/monitoring/router_spec.js
@@ -1,18 +1,28 @@
import { mount, createLocalVue } from '@vue/test-utils';
import VueRouter from 'vue-router';
import DashboardPage from '~/monitoring/pages/dashboard_page.vue';
+import PanelNewPage from '~/monitoring/pages/panel_new_page.vue';
import Dashboard from '~/monitoring/components/dashboard.vue';
import { createStore } from '~/monitoring/stores';
import createRouter from '~/monitoring/router';
import { dashboardProps } from './fixture_data';
import { dashboardHeaderProps } from './mock_data';
+const LEGACY_BASE_PATH = '/project/my-group/test-project/-/environments/71146/metrics';
+const BASE_PATH = '/project/my-group/test-project/-/metrics';
+
+const MockApp = {
+ data() {
+ return {
+ dashboardProps: { ...dashboardProps, ...dashboardHeaderProps },
+ };
+ },
+ template: `<router-view :dashboard-props="dashboardProps"/>`,
+};
+
describe('Monitoring router', () => {
let router;
let store;
- const propsData = { dashboardProps: { ...dashboardProps, ...dashboardHeaderProps } };
- const NEW_BASE_PATH = '/project/my-group/test-project/-/metrics';
- const OLD_BASE_PATH = '/project/my-group/test-project/-/environments/71146/metrics';
const createWrapper = (basePath, routeArg) => {
const localVue = createLocalVue();
@@ -23,11 +33,10 @@ describe('Monitoring router', () => {
router.push(routeArg);
}
- return mount(DashboardPage, {
+ return mount(MockApp, {
localVue,
store,
router,
- propsData,
});
};
@@ -40,26 +49,32 @@ describe('Monitoring router', () => {
window.location.hash = '';
});
- describe('support old URL with full dashboard path', () => {
+ describe('support legacy URLs with full dashboard path to visit dashboard page', () => {
it.each`
- route | currentDashboard
+ path | currentDashboard
${'/dashboard.yml'} | ${'dashboard.yml'}
${'/folder1/dashboard.yml'} | ${'folder1/dashboard.yml'}
${'/?dashboard=dashboard.yml'} | ${'dashboard.yml'}
- `('sets component as $componentName for path "$route"', ({ route, currentDashboard }) => {
- const wrapper = createWrapper(OLD_BASE_PATH, route);
+ `('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
+ const wrapper = createWrapper(LEGACY_BASE_PATH, path);
expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setCurrentDashboard', {
currentDashboard,
});
- expect(wrapper.find(Dashboard)).toExist();
+ expect(wrapper.find(DashboardPage).exists()).toBe(true);
+ expect(
+ wrapper
+ .find(DashboardPage)
+ .find(Dashboard)
+ .exists(),
+ ).toBe(true);
});
});
- describe('supports new URL with short dashboard path', () => {
+ describe('supports URLs to visit dashboard page', () => {
it.each`
- route | currentDashboard
+ path | currentDashboard
${'/'} | ${null}
${'/dashboard.yml'} | ${'dashboard.yml'}
${'/folder1/dashboard.yml'} | ${'folder1/dashboard.yml'}
@@ -68,14 +83,35 @@ describe('Monitoring router', () => {
${'/config/prometheus/common_metrics.yml'} | ${'config/prometheus/common_metrics.yml'}
${'/config/prometheus/pod_metrics.yml'} | ${'config/prometheus/pod_metrics.yml'}
${'/config%2Fprometheus%2Fpod_metrics.yml'} | ${'config/prometheus/pod_metrics.yml'}
- `('sets component as $componentName for path "$route"', ({ route, currentDashboard }) => {
- const wrapper = createWrapper(NEW_BASE_PATH, route);
+ `('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
+ const wrapper = createWrapper(BASE_PATH, path);
expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setCurrentDashboard', {
currentDashboard,
});
- expect(wrapper.find(Dashboard)).toExist();
+ expect(wrapper.find(DashboardPage).exists()).toBe(true);
+ expect(
+ wrapper
+ .find(DashboardPage)
+ .find(Dashboard)
+ .exists(),
+ ).toBe(true);
+ });
+ });
+
+ describe('supports URLs to visit new panel page', () => {
+ it.each`
+ path | currentDashboard
+ ${'/panel/new'} | ${undefined}
+ ${'/dashboard.yml/panel/new'} | ${'dashboard.yml'}
+ ${'/config/prometheus/common_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
+ ${'/config%2Fprometheus%2Fcommon_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
+ `('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
+ const wrapper = createWrapper(BASE_PATH, path);
+
+ expect(wrapper.vm.$route.params.dashboard).toBe(currentDashboard);
+ expect(wrapper.find(PanelNewPage).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 22f2b2e3c77..5c7ab4e6a1f 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -1,10 +1,11 @@
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
+import { backoffMockImplementation } from 'jest/helpers/backoff_helper';
import Tracking from '~/tracking';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import * as commonUtils from '~/lib/utils/common_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import { defaultTimeRange } from '~/vue_shared/constants';
import * as getters from '~/monitoring/stores/getters';
import { ENVIRONMENT_AVAILABLE_STATE } from '~/monitoring/constants';
@@ -30,6 +31,7 @@ import {
duplicateSystemDashboard,
updateVariablesAndFetchData,
fetchVariableMetricLabelValues,
+ fetchPanelPreview,
} from '~/monitoring/stores/actions';
import {
gqClient,
@@ -73,19 +75,7 @@ describe('Monitoring store actions', () => {
commit = jest.fn();
dispatch = jest.fn();
- jest.spyOn(commonUtils, 'backOff').mockImplementation(callback => {
- const q = new Promise((resolve, reject) => {
- const stop = arg => (arg instanceof Error ? reject(arg) : resolve(arg));
- const next = () => callback(next, stop);
- // Define a timeout based on a mock timer
- setTimeout(() => {
- callback(next, stop);
- });
- });
- // Run all resolved promises in chain
- jest.runOnlyPendingTimers();
- return q;
- });
+ jest.spyOn(commonUtils, 'backOff').mockImplementation(backoffMockImplementation);
});
afterEach(() => {
@@ -483,7 +473,6 @@ describe('Monitoring store actions', () => {
],
[],
() => {
- expect(mock.history.get).toHaveLength(1);
done();
},
).catch(done.fail);
@@ -569,46 +558,8 @@ describe('Monitoring store actions', () => {
});
});
- it('commits result, when waiting for results', done => {
- // Mock multiple attempts while the cache is filling up
- mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
- mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
- mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
- mock.onGet(prometheusEndpointPath).reply(200, { data }); // 4th attempt
-
- testAction(
- fetchPrometheusMetric,
- { metric, defaultQueryParams },
- state,
- [
- {
- type: types.REQUEST_METRIC_RESULT,
- payload: {
- metricId: metric.metricId,
- },
- },
- {
- type: types.RECEIVE_METRIC_RESULT_SUCCESS,
- payload: {
- metricId: metric.metricId,
- data,
- },
- },
- ],
- [],
- () => {
- expect(mock.history.get).toHaveLength(4);
- done();
- },
- ).catch(done.fail);
- });
-
it('commits failure, when waiting for results and getting a server error', done => {
- // Mock multiple attempts while the cache is filling up and fails
- mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
- mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
- mock.onGet(prometheusEndpointPath).replyOnce(statusCodes.NO_CONTENT);
- mock.onGet(prometheusEndpointPath).reply(500); // 4th attempt
+ mock.onGet(prometheusEndpointPath).reply(500);
const error = new Error('Request failed with status code 500');
@@ -633,7 +584,6 @@ describe('Monitoring store actions', () => {
],
[],
).catch(e => {
- expect(mock.history.get).toHaveLength(4);
expect(e).toEqual(error);
done();
});
@@ -1205,4 +1155,69 @@ describe('Monitoring store actions', () => {
);
});
});
+
+ describe('fetchPanelPreview', () => {
+ const panelPreviewEndpoint = '/builder.json';
+ const mockYmlContent = 'mock yml content';
+
+ beforeEach(() => {
+ state.panelPreviewEndpoint = panelPreviewEndpoint;
+ });
+
+ it('should not commit or dispatch if payload is empty', () => {
+ testAction(fetchPanelPreview, '', state, [], []);
+ });
+
+ it('should store the panel and fetch metric results', () => {
+ const mockPanel = {
+ title: 'Go heap size',
+ type: 'area-chart',
+ };
+
+ mock
+ .onPost(panelPreviewEndpoint, { panel_yaml: mockYmlContent })
+ .reply(statusCodes.OK, mockPanel);
+
+ testAction(
+ fetchPanelPreview,
+ mockYmlContent,
+ state,
+ [
+ { type: types.SET_PANEL_PREVIEW_IS_SHOWN, payload: true },
+ { type: types.REQUEST_PANEL_PREVIEW, payload: mockYmlContent },
+ { type: types.RECEIVE_PANEL_PREVIEW_SUCCESS, payload: mockPanel },
+ ],
+ [{ type: 'fetchPanelPreviewMetrics' }],
+ );
+ });
+
+ it('should display a validation error when the backend cannot process the yml', () => {
+ const mockErrorMsg = 'Each "metric" must define one of :query or :query_range';
+
+ mock
+ .onPost(panelPreviewEndpoint, { panel_yaml: mockYmlContent })
+ .reply(statusCodes.UNPROCESSABLE_ENTITY, {
+ message: mockErrorMsg,
+ });
+
+ testAction(fetchPanelPreview, mockYmlContent, state, [
+ { type: types.SET_PANEL_PREVIEW_IS_SHOWN, payload: true },
+ { type: types.REQUEST_PANEL_PREVIEW, payload: mockYmlContent },
+ { type: types.RECEIVE_PANEL_PREVIEW_FAILURE, payload: mockErrorMsg },
+ ]);
+ });
+
+ it('should display a generic error when the backend fails', () => {
+ mock.onPost(panelPreviewEndpoint, { panel_yaml: mockYmlContent }).reply(500);
+
+ testAction(fetchPanelPreview, mockYmlContent, state, [
+ { type: types.SET_PANEL_PREVIEW_IS_SHOWN, payload: true },
+ { type: types.REQUEST_PANEL_PREVIEW, payload: mockYmlContent },
+ {
+ type: types.RECEIVE_PANEL_PREVIEW_FAILURE,
+ payload: 'Request failed with status code 500',
+ },
+ ]);
+ });
+ });
});
diff --git a/spec/frontend/monitoring/store/getters_spec.js b/spec/frontend/monitoring/store/getters_spec.js
index a69f5265ea7..509de8a4596 100644
--- a/spec/frontend/monitoring/store/getters_spec.js
+++ b/spec/frontend/monitoring/store/getters_spec.js
@@ -11,37 +11,36 @@ import {
storeVariables,
mockLinks,
} from '../mock_data';
-import {
- metricsDashboardPayload,
- metricResultStatus,
- metricResultPods,
- metricResultEmpty,
-} from '../fixture_data';
+import { metricsDashboardPayload } from '../fixture_data';
describe('Monitoring store Getters', () => {
+ let state;
+
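+  // Helpers to look up a metric in the dashboard state and to simulate query success/failure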
+ const getMetric = ({ group = 0, panel = 0, metric = 0 } = {}) =>
+ state.dashboard.panelGroups[group].panels[panel].metrics[metric];
+
+ const setMetricSuccess = ({ group, panel, metric, result = metricsResult } = {}) => {
+ const { metricId } = getMetric({ group, panel, metric });
+ mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
+ metricId,
+ data: {
+ resultType: 'matrix',
+ result,
+ },
+ });
+ };
+
+ const setMetricFailure = ({ group, panel, metric } = {}) => {
+ const { metricId } = getMetric({ group, panel, metric });
+ mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
+ metricId,
+ });
+ };
+
describe('getMetricStates', () => {
let setupState;
- let state;
let getMetricStates;
- const setMetricSuccess = ({ result = metricsResult, group = 0, panel = 0, metric = 0 }) => {
- const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
- metricId,
- data: {
- resultType: 'matrix',
- result,
- },
- });
- };
-
- const setMetricFailure = ({ group = 0, panel = 0, metric = 0 }) => {
- const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
- mutations[types.RECEIVE_METRIC_RESULT_FAILURE](state, {
- metricId,
- });
- };
-
beforeEach(() => {
setupState = (initState = {}) => {
state = initState;
@@ -81,7 +80,7 @@ describe('Monitoring store Getters', () => {
it('on an empty metric with no result, returns NO_DATA', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- setMetricSuccess({ result: [], group: 2 });
+ setMetricSuccess({ group: 2, result: [] });
expect(getMetricStates()).toEqual([metricStates.NO_DATA]);
});
@@ -147,7 +146,6 @@ describe('Monitoring store Getters', () => {
describe('metricsWithData', () => {
let metricsWithData;
let setupState;
- let state;
beforeEach(() => {
setupState = (initState = {}) => {
@@ -191,35 +189,39 @@ describe('Monitoring store Getters', () => {
it('an empty metric, returns empty', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultEmpty);
+ setMetricSuccess({ result: [] });
expect(metricsWithData()).toEqual([]);
});
it('a metric with results, it returns a metric', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
+ setMetricSuccess();
- expect(metricsWithData()).toEqual([metricResultStatus.metricId]);
+ expect(metricsWithData()).toEqual([getMetric().metricId]);
});
it('multiple metrics with results, it return multiple metrics', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
+ setMetricSuccess({ panel: 0 });
+ setMetricSuccess({ panel: 1 });
- expect(metricsWithData()).toEqual([metricResultStatus.metricId, metricResultPods.metricId]);
+ expect(metricsWithData()).toEqual([
+ getMetric({ panel: 0 }).metricId,
+ getMetric({ panel: 1 }).metricId,
+ ]);
});
it('multiple metrics with results, it returns metrics filtered by group', () => {
mutations[types.RECEIVE_METRICS_DASHBOARD_SUCCESS](state, metricsDashboardPayload);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultStatus);
- mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, metricResultPods);
+
+ setMetricSuccess({ group: 1 });
+ setMetricSuccess({ group: 1, panel: 1 });
// First group has metrics
expect(metricsWithData(state.dashboard.panelGroups[1].key)).toEqual([
- metricResultStatus.metricId,
- metricResultPods.metricId,
+ getMetric({ group: 1 }).metricId,
+ getMetric({ group: 1, panel: 1 }).metricId,
]);
// Second group has no metrics
@@ -229,7 +231,6 @@ describe('Monitoring store Getters', () => {
});
describe('filteredEnvironments', () => {
- let state;
const setupState = (initState = {}) => {
state = {
...state,
@@ -284,7 +285,6 @@ describe('Monitoring store Getters', () => {
describe('metricsSavedToDb', () => {
let metricsSavedToDb;
- let state;
let mockData;
beforeEach(() => {
@@ -335,8 +335,6 @@ describe('Monitoring store Getters', () => {
});
describe('getCustomVariablesParams', () => {
- let state;
-
beforeEach(() => {
state = {
variables: {},
@@ -367,58 +365,65 @@ describe('Monitoring store Getters', () => {
describe('selectedDashboard', () => {
const { selectedDashboard } = getters;
- const localGetters = state => ({
- fullDashboardPath: getters.fullDashboardPath(state),
+ const localGetters = localState => ({
+ fullDashboardPath: getters.fullDashboardPath(localState),
});
it('returns a dashboard', () => {
- const state = {
+ const localState = {
allDashboards: dashboardGitResponse,
currentDashboard: dashboardGitResponse[0].path,
customDashboardBasePath,
};
- expect(selectedDashboard(state, localGetters(state))).toEqual(dashboardGitResponse[0]);
+ expect(selectedDashboard(localState, localGetters(localState))).toEqual(
+ dashboardGitResponse[0],
+ );
});
- it('returns a non-default dashboard', () => {
- const state = {
+ it('returns a dashboard different from the overview dashboard', () => {
+ const localState = {
allDashboards: dashboardGitResponse,
currentDashboard: dashboardGitResponse[1].path,
customDashboardBasePath,
};
- expect(selectedDashboard(state, localGetters(state))).toEqual(dashboardGitResponse[1]);
+ expect(selectedDashboard(localState, localGetters(localState))).toEqual(
+ dashboardGitResponse[1],
+ );
});
- it('returns a default dashboard when no dashboard is selected', () => {
- const state = {
+ it('returns the overview dashboard when no dashboard is selected', () => {
+ const localState = {
allDashboards: dashboardGitResponse,
currentDashboard: null,
customDashboardBasePath,
};
- expect(selectedDashboard(state, localGetters(state))).toEqual(dashboardGitResponse[0]);
+ expect(selectedDashboard(localState, localGetters(localState))).toEqual(
+ dashboardGitResponse[0],
+ );
});
- it('returns a default dashboard when dashboard cannot be found', () => {
- const state = {
+ it('returns the overview dashboard when dashboard cannot be found', () => {
+ const localState = {
allDashboards: dashboardGitResponse,
currentDashboard: 'wrong_path',
customDashboardBasePath,
};
- expect(selectedDashboard(state, localGetters(state))).toEqual(dashboardGitResponse[0]);
+ expect(selectedDashboard(localState, localGetters(localState))).toEqual(
+ dashboardGitResponse[0],
+ );
});
it('returns null when no dashboards are present', () => {
- const state = {
+ const localState = {
allDashboards: [],
currentDashboard: dashboardGitResponse[0].path,
customDashboardBasePath,
};
- expect(selectedDashboard(state, localGetters(state))).toEqual(null);
+ expect(selectedDashboard(localState, localGetters(localState))).toEqual(null);
});
});
describe('linksWithMetadata', () => {
- let state;
const setupState = (initState = {}) => {
state = {
...state,
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 14b38d79aa2..8d1351fc909 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -4,8 +4,8 @@ import mutations from '~/monitoring/stores/mutations';
import * as types from '~/monitoring/stores/mutation_types';
import state from '~/monitoring/stores/state';
import { dashboardEmptyStates, metricStates } from '~/monitoring/constants';
-
import { deploymentData, dashboardGitResponse, storeTextVariables } from '../mock_data';
+import { prometheusMatrixMultiResult } from '../graph_data';
import { metricsDashboardPayload } from '../fixture_data';
describe('Monitoring mutations', () => {
@@ -259,27 +259,6 @@ describe('Monitoring mutations', () => {
describe('Individual panel/metric results', () => {
const metricId = 'NO_DB_response_metrics_nginx_ingress_throughput_status_code';
- const data = {
- resultType: 'matrix',
- result: [
- {
- metric: {
- __name__: 'up',
- job: 'prometheus',
- instance: 'localhost:9090',
- },
- values: [[1435781430.781, '1'], [1435781445.781, '1'], [1435781460.781, '1']],
- },
- {
- metric: {
- __name__: 'up',
- job: 'node',
- instance: 'localhost:9091',
- },
- values: [[1435781430.781, '0'], [1435781445.781, '0'], [1435781460.781, '1']],
- },
- ],
- };
const dashboard = metricsDashboardPayload;
const getMetric = () => stateCopy.dashboard.panelGroups[1].panels[0].metrics[0];
@@ -307,6 +286,8 @@ describe('Monitoring mutations', () => {
});
it('adds results to the store', () => {
+ const data = prometheusMatrixMultiResult();
+
expect(getMetric().result).toBe(null);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](stateCopy, {
@@ -488,4 +469,128 @@ describe('Monitoring mutations', () => {
});
});
});
+
+ describe('REQUEST_PANEL_PREVIEW', () => {
+ it('saves yml content and resets other preview data', () => {
+ const mockYmlContent = 'mock yml content';
+ mutations[types.REQUEST_PANEL_PREVIEW](stateCopy, mockYmlContent);
+
+ expect(stateCopy.panelPreviewIsLoading).toBe(true);
+ expect(stateCopy.panelPreviewYml).toBe(mockYmlContent);
+ expect(stateCopy.panelPreviewGraphData).toBe(null);
+ expect(stateCopy.panelPreviewError).toBe(null);
+ });
+ });
+
+ describe('RECEIVE_PANEL_PREVIEW_SUCCESS', () => {
+ it('saves graph data', () => {
+ mutations[types.RECEIVE_PANEL_PREVIEW_SUCCESS](stateCopy, {
+ title: 'My Title',
+ type: 'area-chart',
+ });
+
+ expect(stateCopy.panelPreviewIsLoading).toBe(false);
+ expect(stateCopy.panelPreviewGraphData).toMatchObject({
+ title: 'My Title',
+ type: 'area-chart',
+ });
+ expect(stateCopy.panelPreviewError).toBe(null);
+ });
+ });
+
+ describe('RECEIVE_PANEL_PREVIEW_FAILURE', () => {
+    it('saves the error and clears graph data', () => {
+ mutations[types.RECEIVE_PANEL_PREVIEW_FAILURE](stateCopy, 'Error!');
+
+ expect(stateCopy.panelPreviewIsLoading).toBe(false);
+ expect(stateCopy.panelPreviewGraphData).toBe(null);
+ expect(stateCopy.panelPreviewError).toBe('Error!');
+ });
+ });
+
+ describe('panel preview metric', () => {
+ const getPreviewMetricAt = i => stateCopy.panelPreviewGraphData.metrics[i];
+
+ beforeEach(() => {
+ stateCopy.panelPreviewGraphData = {
+ title: 'Preview panel title',
+ metrics: [
+ {
+ query: 'query',
+ },
+ ],
+ };
+ });
+
+ describe('REQUEST_PANEL_PREVIEW_METRIC_RESULT', () => {
+ it('sets the metric to loading for the first time', () => {
+ mutations[types.REQUEST_PANEL_PREVIEW_METRIC_RESULT](stateCopy, { index: 0 });
+
+ expect(getPreviewMetricAt(0).loading).toBe(true);
+ expect(getPreviewMetricAt(0).state).toBe(metricStates.LOADING);
+ });
+
+ it('sets the metric to loading and keeps the result', () => {
+ getPreviewMetricAt(0).result = [[0, 1]];
+ getPreviewMetricAt(0).state = metricStates.OK;
+
+ mutations[types.REQUEST_PANEL_PREVIEW_METRIC_RESULT](stateCopy, { index: 0 });
+
+ expect(getPreviewMetricAt(0)).toMatchObject({
+ loading: true,
+ result: [[0, 1]],
+ state: metricStates.OK,
+ });
+ });
+ });
+
+ describe('RECEIVE_PANEL_PREVIEW_METRIC_RESULT_SUCCESS', () => {
+ it('saves the result in the metric', () => {
+ const data = prometheusMatrixMultiResult();
+
+ mutations[types.RECEIVE_PANEL_PREVIEW_METRIC_RESULT_SUCCESS](stateCopy, {
+ index: 0,
+ data,
+ });
+
+ expect(getPreviewMetricAt(0)).toMatchObject({
+ loading: false,
+ state: metricStates.OK,
+ result: expect.any(Array),
+ });
+ expect(getPreviewMetricAt(0).result).toHaveLength(data.result.length);
+ });
+ });
+
+ describe('RECEIVE_PANEL_PREVIEW_METRIC_RESULT_FAILURE', () => {
+ it('stores an error in the metric', () => {
+ mutations[types.RECEIVE_PANEL_PREVIEW_METRIC_RESULT_FAILURE](stateCopy, {
+ index: 0,
+ });
+
+ expect(getPreviewMetricAt(0).loading).toBe(false);
+ expect(getPreviewMetricAt(0).state).toBe(metricStates.UNKNOWN_ERROR);
+ expect(getPreviewMetricAt(0).result).toBe(null);
+
+ expect(getPreviewMetricAt(0)).toMatchObject({
+ loading: false,
+ result: null,
+ state: metricStates.UNKNOWN_ERROR,
+ });
+ });
+
+ it('stores a timeout error in a metric', () => {
+ mutations[types.RECEIVE_PANEL_PREVIEW_METRIC_RESULT_FAILURE](stateCopy, {
+ index: 0,
+ error: { message: 'BACKOFF_TIMEOUT' },
+ });
+
+ expect(getPreviewMetricAt(0)).toMatchObject({
+ loading: false,
+ result: null,
+ state: metricStates.TIMEOUT,
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/monitoring/utils_spec.js b/spec/frontend/monitoring/utils_spec.js
index 35ca6ba9b52..fd7d09f7f72 100644
--- a/spec/frontend/monitoring/utils_spec.js
+++ b/spec/frontend/monitoring/utils_spec.js
@@ -1,6 +1,6 @@
+import { TEST_HOST } from 'jest/helpers/test_constants';
import * as monitoringUtils from '~/monitoring/utils';
import * as urlUtils from '~/lib/utils/url_utility';
-import { TEST_HOST } from 'jest/helpers/test_constants';
import { mockProjectDir, barMockData } from './mock_data';
import { singleStatGraphData, anomalyGraphData } from './graph_data';
import { metricsDashboardViewModel, graphData } from './fixture_data';
diff --git a/spec/frontend/notebook/cells/output/html_sanitize_fixtures.js b/spec/frontend/notebook/cells/output/html_sanitize_fixtures.js
new file mode 100644
index 00000000000..a886715ce4b
--- /dev/null
+++ b/spec/frontend/notebook/cells/output/html_sanitize_fixtures.js
@@ -0,0 +1,114 @@
+export default [
+ [
+ 'protocol-based JS injection: simple, no spaces',
+ {
+ input: `<a href="javascript:alert('XSS');">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: simple, spaces before',
+ {
+ input: `<a href="javascript :alert('XSS');">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: simple, spaces after',
+ {
+ input: `<a href="javascript: alert('XSS');">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: simple, spaces before and after',
+ {
+ input: `<a href="javascript : alert('XSS');">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: preceding colon',
+ {
+ input: `<a href=":javascript:alert('XSS');">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: UTF-8 encoding',
+ {
+ input: '<a href="javascript&#58;">foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: long UTF-8 encoding',
+ {
+ input: '<a href="javascript&#0058;">foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: long UTF-8 encoding without semicolons',
+ {
+ input:
+ '<a href=&#0000106&#0000097&#0000118&#0000097&#0000115&#0000099&#0000114&#0000105&#0000112&#0000116&#0000058&#0000097&#0000108&#0000101&#0000114&#0000116&#0000040&#0000039&#0000088&#0000083&#0000083&#0000039&#0000041>foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: hex encoding',
+ {
+ input: '<a href="javascript&#x3A;">foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: long hex encoding',
+ {
+ input: '<a href="javascript&#x003A;">foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: hex encoding without semicolons',
+ {
+ input:
+ '<a href=&#x6A&#x61&#x76&#x61&#x73&#x63&#x72&#x69&#x70&#x74&#x3A&#x61&#x6C&#x65&#x72&#x74&#x28&#x27&#x58&#x53&#x53&#x27&#x29>foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: null char',
+ {
+ input: '<a href=java\u0000script:alert("XSS")>foo</a>',
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: invalid URL char',
+ { input: '<img src=javascript:alert("XSS")>', output: '<img>' },
+ ],
+ [
+ 'protocol-based JS injection: Unicode',
+ {
+ input: `<a href="\u0001java\u0003script:alert('XSS')">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'protocol-based JS injection: spaces and entities',
+ {
+ input: `<a href=" &#14; javascript:alert('XSS');">foo</a>`,
+ output: '<a>foo</a>',
+ },
+ ],
+ [
+ 'img on error',
+ {
+ input: '<img src="x" onerror="alert(document.domain)" />',
+ output: '<img src="x">',
+ },
+ ],
+ ['style tags are removed', { input: '<style>.foo {}</style> Foo', output: 'Foo' }],
+];
diff --git a/spec/frontend/notebook/cells/output/html_sanitize_tests.js b/spec/frontend/notebook/cells/output/html_sanitize_tests.js
deleted file mode 100644
index 74c48f04367..00000000000
--- a/spec/frontend/notebook/cells/output/html_sanitize_tests.js
+++ /dev/null
@@ -1,68 +0,0 @@
-export default {
- 'protocol-based JS injection: simple, no spaces': {
- input: '<a href="javascript:alert(\'XSS\');">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: simple, spaces before': {
- input: '<a href="javascript :alert(\'XSS\');">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: simple, spaces after': {
- input: '<a href="javascript: alert(\'XSS\');">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: simple, spaces before and after': {
- input: '<a href="javascript : alert(\'XSS\');">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: preceding colon': {
- input: '<a href=":javascript:alert(\'XSS\');">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: UTF-8 encoding': {
- input: '<a href="javascript&#58;">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: long UTF-8 encoding': {
- input: '<a href="javascript&#0058;">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: long UTF-8 encoding without semicolons': {
- input:
- '<a href=&#0000106&#0000097&#0000118&#0000097&#0000115&#0000099&#0000114&#0000105&#0000112&#0000116&#0000058&#0000097&#0000108&#0000101&#0000114&#0000116&#0000040&#0000039&#0000088&#0000083&#0000083&#0000039&#0000041>foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: hex encoding': {
- input: '<a href="javascript&#x3A;">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: long hex encoding': {
- input: '<a href="javascript&#x003A;">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: hex encoding without semicolons': {
- input:
- '<a href=&#x6A&#x61&#x76&#x61&#x73&#x63&#x72&#x69&#x70&#x74&#x3A&#x61&#x6C&#x65&#x72&#x74&#x28&#x27&#x58&#x53&#x53&#x27&#x29>foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: null char': {
- input: '<a href=java\0script:alert("XSS")>foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: invalid URL char': {
- input: '<img src=javascript:alert("XSS")>',
- output: '<img>',
- },
- 'protocol-based JS injection: Unicode': {
- input: '<a href="\u0001java\u0003script:alert(\'XSS\')">foo</a>',
- output: '<a>foo</a>',
- },
- 'protocol-based JS injection: spaces and entities': {
- input: '<a href=" &#14; javascript:alert(\'XSS\');">foo</a>',
- output: '<a>foo</a>',
- },
- 'img on error': {
- input: '<img src="x" onerror="alert(document.domain)" />',
- output: '<img src="x">',
- },
-};
diff --git a/spec/frontend/notebook/cells/output/html_spec.js b/spec/frontend/notebook/cells/output/html_spec.js
index 3ee404fb187..48d62d74a50 100644
--- a/spec/frontend/notebook/cells/output/html_spec.js
+++ b/spec/frontend/notebook/cells/output/html_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
import htmlOutput from '~/notebook/cells/output/html.vue';
-import sanitizeTests from './html_sanitize_tests';
+import sanitizeTests from './html_sanitize_fixtures';
describe('html output cell', () => {
function createComponent(rawCode) {
@@ -15,17 +15,12 @@ describe('html output cell', () => {
}).$mount();
}
- describe('sanitizes output', () => {
- Object.keys(sanitizeTests).forEach(key => {
- it(key, () => {
- const test = sanitizeTests[key];
- const vm = createComponent(test.input);
- const outputEl = [...vm.$el.querySelectorAll('div')].pop();
+ it.each(sanitizeTests)('sanitizes output for: %p', (name, { input, output }) => {
+ const vm = createComponent(input);
+ const outputEl = [...vm.$el.querySelectorAll('div')].pop();
- expect(outputEl.innerHTML).toEqual(test.output);
+ expect(outputEl.innerHTML).toEqual(output);
- vm.$destroy();
- });
- });
+ vm.$destroy();
});
});
diff --git a/spec/frontend/notebook/cells/output/index_spec.js b/spec/frontend/notebook/cells/output/index_spec.js
index 2b1aa5317c5..b9a2dfb8f34 100644
--- a/spec/frontend/notebook/cells/output/index_spec.js
+++ b/spec/frontend/notebook/cells/output/index_spec.js
@@ -34,7 +34,7 @@ describe('Output component', () => {
expect(vm.$el.querySelector('pre')).not.toBeNull();
});
- it('renders promot', () => {
+ it('renders prompt', () => {
expect(vm.$el.querySelector('.prompt span')).not.toBeNull();
});
});
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index 44dc148933c..3e1e43d0c6a 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -21,7 +21,7 @@ const createUnallowedNote = () =>
describe('DiscussionActions', () => {
let wrapper;
- const createComponentFactory = (shallow = true) => props => {
+ const createComponentFactory = (shallow = true) => (props, options) => {
const store = createStore();
const mountFn = shallow ? shallowMount : mount;
@@ -35,6 +35,11 @@ describe('DiscussionActions', () => {
shouldShowJumpToNextDiscussion: true,
...props,
},
+ provide: {
+ glFeatures: {
+ hideJumpToNextUnresolvedInThreads: options?.hideJumpToNextUnresolvedInThreads,
+ },
+ },
});
};
@@ -96,6 +101,13 @@ describe('DiscussionActions', () => {
});
});
+ it('does not render jump to next discussion button if feature flag is enabled', () => {
+ const createComponent = createComponentFactory();
+ createComponent({}, { hideJumpToNextUnresolvedInThreads: true });
+
+ expect(wrapper.find(JumpToNextDiscussionButton).exists()).toBe(false);
+ });
+
describe('events handling', () => {
const createComponent = createComponentFactory(false);
diff --git a/spec/frontend/notes/components/discussion_filter_spec.js b/spec/frontend/notes/components/discussion_filter_spec.js
index 7f042c0e9de..9a7896475e6 100644
--- a/spec/frontend/notes/components/discussion_filter_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_spec.js
@@ -1,8 +1,8 @@
-import createEventHub from '~/helpers/event_hub_factory';
import Vuex from 'vuex';
-
import { createLocalVue, mount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'jest/helpers/test_constants';
+import createEventHub from '~/helpers/event_hub_factory';
import axios from '~/lib/utils/axios_utils';
import notesModule from '~/notes/stores/modules';
@@ -10,7 +10,6 @@ import DiscussionFilter from '~/notes/components/discussion_filter.vue';
import { DISCUSSION_FILTERS_DEFAULT_VALUE, DISCUSSION_FILTER_TYPES } from '~/notes/constants';
import { discussionFiltersMock, discussionMock } from '../mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
const localVue = createLocalVue();
diff --git a/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js b/spec/frontend/notes/components/discussion_navigator_spec.js
index e932133b869..122814b8e3f 100644
--- a/spec/frontend/notes/components/discussion_keyboard_navigator_spec.js
+++ b/spec/frontend/notes/components/discussion_navigator_spec.js
@@ -1,9 +1,11 @@
/* global Mousetrap */
import 'mousetrap';
+import Vue from 'vue';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import DiscussionKeyboardNavigator from '~/notes/components/discussion_keyboard_navigator.vue';
+import DiscussionNavigator from '~/notes/components/discussion_navigator.vue';
+import eventHub from '~/notes/event_hub';
-describe('notes/components/discussion_keyboard_navigator', () => {
+describe('notes/components/discussion_navigator', () => {
const localVue = createLocalVue();
let wrapper;
@@ -11,7 +13,7 @@ describe('notes/components/discussion_keyboard_navigator', () => {
let jumpToPreviousDiscussion;
const createComponent = () => {
- wrapper = shallowMount(DiscussionKeyboardNavigator, {
+ wrapper = shallowMount(DiscussionNavigator, {
mixins: [
localVue.extend({
methods: {
@@ -29,10 +31,29 @@ describe('notes/components/discussion_keyboard_navigator', () => {
});
afterEach(() => {
- wrapper.destroy();
+ if (wrapper) {
+ wrapper.destroy();
+ }
wrapper = null;
});
+ describe('on create', () => {
+ let onSpy;
+ let vm;
+
+ beforeEach(() => {
+ onSpy = jest.spyOn(eventHub, '$on');
+ vm = new (Vue.extend(DiscussionNavigator))();
+ });
+
+ it('listens for jumpToFirstUnresolvedDiscussion events', () => {
+ expect(onSpy).toHaveBeenCalledWith(
+ 'jumpToFirstUnresolvedDiscussion',
+ vm.jumpToFirstUnresolvedDiscussion,
+ );
+ });
+ });
+
describe('on mount', () => {
beforeEach(() => {
createComponent();
@@ -52,11 +73,16 @@ describe('notes/components/discussion_keyboard_navigator', () => {
});
describe('on destroy', () => {
+ let jumpFn;
+
beforeEach(() => {
jest.spyOn(Mousetrap, 'unbind');
+ jest.spyOn(eventHub, '$off');
createComponent();
+ jumpFn = wrapper.vm.jumpToFirstUnresolvedDiscussion;
+
wrapper.destroy();
});
@@ -65,6 +91,10 @@ describe('notes/components/discussion_keyboard_navigator', () => {
expect(Mousetrap.unbind).toHaveBeenCalledWith('p');
});
+ it('unbinds event hub listeners', () => {
+ expect(eventHub.$off).toHaveBeenCalledWith('jumpToFirstUnresolvedDiscussion', jumpFn);
+ });
+
it('does not call jumpToNextDiscussion when pressing `n`', () => {
Mousetrap.trigger('n');
diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js
index 5a10deefd09..8cc98f978c2 100644
--- a/spec/frontend/notes/components/discussion_notes_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { getByRole } from '@testing-library/dom';
import '~/behaviors/markdown/render_gfm';
import { SYSTEM_NOTE } from '~/notes/constants';
import DiscussionNotes from '~/notes/components/discussion_notes.vue';
@@ -9,14 +10,20 @@ import SystemNote from '~/vue_shared/components/notes/system_note.vue';
import createStore from '~/notes/stores';
import { noteableDataMock, discussionMock, notesDataMock } from '../mock_data';
+const LINE_RANGE = {};
+const DISCUSSION_WITH_LINE_RANGE = {
+ ...discussionMock,
+ position: {
+ line_range: LINE_RANGE,
+ },
+};
+
describe('DiscussionNotes', () => {
+ let store;
let wrapper;
- const createComponent = props => {
- const store = createStore();
- store.dispatch('setNoteableData', noteableDataMock);
- store.dispatch('setNotesData', notesDataMock);
-
+ const getList = () => getByRole(wrapper.element, 'list');
+ const createComponent = (props, features = {}) => {
wrapper = shallowMount(DiscussionNotes, {
store,
propsData: {
@@ -31,11 +38,21 @@ describe('DiscussionNotes', () => {
slots: {
'avatar-badge': '<span class="avatar-badge-slot-content" />',
},
+ provide: {
+ glFeatures: { multilineComments: true, ...features },
+ },
});
};
+ beforeEach(() => {
+ store = createStore();
+ store.dispatch('setNoteableData', noteableDataMock);
+ store.dispatch('setNotesData', notesDataMock);
+ });
+
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
describe('rendering', () => {
@@ -160,6 +177,26 @@ describe('DiscussionNotes', () => {
});
});
+ describe.each`
+ desc | props | features | event | expectedCalls
+ ${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${{}} | ${'mouseenter'} | ${[['setSelectedCommentPositionHover', LINE_RANGE]]}
+ ${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${{}} | ${'mouseleave'} | ${[['setSelectedCommentPositionHover']]}
+ ${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${{ multilineComments: false }} | ${'mouseenter'} | ${[]}
+ ${'with `discussion.position`'} | ${{ discussion: DISCUSSION_WITH_LINE_RANGE }} | ${{ multilineComments: false }} | ${'mouseleave'} | ${[]}
+ ${'without `discussion.position`'} | ${{}} | ${{}} | ${'mouseenter'} | ${[]}
+ ${'without `discussion.position`'} | ${{}} | ${{}} | ${'mouseleave'} | ${[]}
+ `('$desc and features $features', ({ props, event, features, expectedCalls }) => {
+ beforeEach(() => {
+ createComponent(props, features);
+ jest.spyOn(store, 'dispatch');
+ });
+
+ it(`calls store ${expectedCalls.length} times on ${event}`, () => {
+ getList().dispatchEvent(new MouseEvent(event));
+ expect(store.dispatch.mock.calls).toEqual(expectedCalls);
+ });
+ });
+
describe('componentData', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js b/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js
index e62fb5db2c0..4348445f7ca 100644
--- a/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js
+++ b/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { TEST_HOST } from 'spec/test_constants';
import ResolveWithIssueButton from '~/notes/components/discussion_resolve_with_issue_button.vue';
@@ -23,7 +23,7 @@ describe('ResolveWithIssueButton', () => {
});
it('should have a link with the provided link property as href', () => {
- const button = wrapper.find(GlDeprecatedButton);
+ const button = wrapper.find(GlButton);
expect(button.attributes().href).toBe(url);
});
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index 5cc56cdefae..97d1752726b 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -1,10 +1,10 @@
import Vue from 'vue';
import { shallowMount, createLocalVue, createWrapper } from '@vue/test-utils';
import { TEST_HOST } from 'spec/test_constants';
+import AxiosMockAdapter from 'axios-mock-adapter';
import createStore from '~/notes/stores';
import noteActions from '~/notes/components/note_actions.vue';
import { userDataMock } from '../mock_data';
-import AxiosMockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
describe('noteActions', () => {
diff --git a/spec/frontend/notes/components/note_awards_list_spec.js b/spec/frontend/notes/components/note_awards_list_spec.js
index 822b1f9efce..dce5424f154 100644
--- a/spec/frontend/notes/components/note_awards_list_spec.js
+++ b/spec/frontend/notes/components/note_awards_list_spec.js
@@ -1,10 +1,10 @@
import Vue from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import createStore from '~/notes/stores';
import awardsNote from '~/notes/components/note_awards_list.vue';
import { noteableDataMock, notesDataMock } from '../mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('note_awards_list component', () => {
let store;
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index b14ec2a65be..1c6603899d3 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -1,4 +1,6 @@
import { mount, createLocalVue } from '@vue/test-utils';
+import mockDiffFile from 'jest/diffs/mock_data/diff_file';
+import { trimText } from 'helpers/text_helper';
import createStore from '~/notes/stores';
import noteableDiscussion from '~/notes/components/noteable_discussion.vue';
import ReplyPlaceholder from '~/notes/components/discussion_reply_placeholder.vue';
@@ -12,8 +14,6 @@ import {
loggedOutnoteableData,
userDataMock,
} from '../mock_data';
-import mockDiffFile from 'jest/diffs/mock_data/diff_file';
-import { trimText } from 'helpers/text_helper';
const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
@@ -89,6 +89,23 @@ describe('noteable_discussion component', () => {
});
});
+ it('should expand discussion', async () => {
+ const expandDiscussion = jest.fn();
+ const discussion = { ...discussionMock };
+ discussion.expanded = false;
+
+ wrapper.setProps({ discussion });
+ wrapper.setMethods({ expandDiscussion });
+
+ await wrapper.vm.$nextTick();
+
+ wrapper.vm.showReplyForm();
+
+ await wrapper.vm.$nextTick();
+
+ expect(expandDiscussion).toHaveBeenCalledWith({ discussionId: discussion.id });
+ });
+
it('does not render jump to thread button', () => {
expect(wrapper.find('*[data-original-title="Jump to next unresolved thread"]').exists()).toBe(
false,
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index fc238feb974..a08e86d92d3 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -83,18 +83,34 @@ describe('issue_note', () => {
});
});
- it('should render multiline comment if editing discussion root', () => {
- wrapper.setProps({ discussionRoot: true });
- wrapper.vm.isEditing = true;
-
- return wrapper.vm.$nextTick().then(() => {
- expect(findMultilineComment().exists()).toBe(true);
+ it('should only render if it has everything it needs', () => {
+ const position = {
+ line_range: {
+ start: {
+ line_code: 'abc_1_1',
+ type: null,
+ old_line: '',
+ new_line: '',
+ },
+ end: {
+ line_code: 'abc_2_2',
+ type: null,
+ old_line: '2',
+ new_line: '2',
+ },
+ },
+ };
+ const line = {
+ line_code: 'abc_1_1',
+ type: null,
+ old_line: '1',
+ new_line: '1',
+ };
+ wrapper.setProps({
+ note: { ...note, position },
+ discussionRoot: true,
+ line,
});
- });
-
- it('should not render multiline comment form unless it is the discussion root', () => {
- wrapper.setProps({ discussionRoot: false });
- wrapper.vm.isEditing = true;
return wrapper.vm.$nextTick().then(() => {
expect(findMultilineComment().exists()).toBe(false);
diff --git a/spec/frontend/notes/mixins/discussion_navigation_spec.js b/spec/frontend/notes/mixins/discussion_navigation_spec.js
index ecff95b6fe0..11c0bbfefc9 100644
--- a/spec/frontend/notes/mixins/discussion_navigation_spec.js
+++ b/spec/frontend/notes/mixins/discussion_navigation_spec.js
@@ -1,11 +1,11 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { setHTMLFixture } from 'helpers/fixtures';
import * as utils from '~/lib/utils/common_utils';
import discussionNavigation from '~/notes/mixins/discussion_navigation';
import eventHub from '~/notes/event_hub';
import createEventHub from '~/helpers/event_hub_factory';
import notesModule from '~/notes/stores/modules';
-import { setHTMLFixture } from 'helpers/fixtures';
const discussion = (id, index) => ({
id,
@@ -66,6 +66,35 @@ describe('Discussion navigation mixin', () => {
const findDiscussion = (selector, id) =>
document.querySelector(`${selector}[data-discussion-id="${id}"]`);
+ describe('jumpToFirstUnresolvedDiscussion method', () => {
+ let vm;
+
+ beforeEach(() => {
+ createComponent();
+
+ ({ vm } = wrapper);
+
+ jest.spyOn(store, 'dispatch');
+ jest.spyOn(vm, 'jumpToNextDiscussion');
+ });
+
+ it('triggers the setCurrentDiscussionId action with null as the value', () => {
+ vm.jumpToFirstUnresolvedDiscussion();
+
+ expect(store.dispatch).toHaveBeenCalledWith('setCurrentDiscussionId', null);
+ });
+
+ it('triggers the jumpToNextDiscussion action when the previous store action succeeds', () => {
+ store.dispatch.mockResolvedValue();
+
+ vm.jumpToFirstUnresolvedDiscussion();
+
+ return vm.$nextTick().then(() => {
+ expect(vm.jumpToNextDiscussion).toHaveBeenCalled();
+ });
+ });
+ });
+
describe('cycle through discussions', () => {
beforeEach(() => {
window.mrTabs = { eventHub: createEventHub(), tabShown: jest.fn() };
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 909a4a797ae..6b8d0790669 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1,7 +1,7 @@
import { TEST_HOST } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
import Api from '~/api';
-import Flash from '~/flash';
+import { deprecatedCreateFlash as Flash } from '~/flash';
import * as actions from '~/notes/stores/actions';
import * as mutationTypes from '~/notes/stores/mutation_types';
import * as notesConstants from '~/notes/constants';
@@ -19,7 +19,9 @@ import {
} from '../mock_data';
import axios from '~/lib/utils/axios_utils';
import * as utils from '~/notes/stores/utils';
-import updateIssueConfidentialMutation from '~/sidebar/components/confidential/queries/update_issue_confidential.mutation.graphql';
+import updateIssueConfidentialMutation from '~/sidebar/components/confidential/mutations/update_issue_confidential.mutation.graphql';
+import updateMergeRequestLockMutation from '~/sidebar/components/lock/mutations/update_merge_request_lock.mutation.graphql';
+import updateIssueLockMutation from '~/sidebar/components/lock/mutations/update_issue_lock.mutation.graphql';
const TEST_ERROR_MESSAGE = 'Test error message';
jest.mock('~/flash');
@@ -1219,7 +1221,7 @@ describe('Actions Notes Store', () => {
});
});
- describe('updateConfidentialityOnIssue', () => {
+ describe('updateConfidentialityOnIssuable', () => {
state = { noteableData: { confidential: false } };
const iid = '1';
const projectPath = 'full/path';
@@ -1234,13 +1236,13 @@ describe('Actions Notes Store', () => {
});
it('calls gqClient mutation one time', () => {
- actions.updateConfidentialityOnIssue({ commit: () => {}, state, getters }, actionArgs);
+ actions.updateConfidentialityOnIssuable({ commit: () => {}, state, getters }, actionArgs);
expect(utils.gqClient.mutate).toHaveBeenCalledTimes(1);
});
it('calls gqClient mutation with the correct values', () => {
- actions.updateConfidentialityOnIssue({ commit: () => {}, state, getters }, actionArgs);
+ actions.updateConfidentialityOnIssuable({ commit: () => {}, state, getters }, actionArgs);
expect(utils.gqClient.mutate).toHaveBeenCalledWith({
mutation: updateIssueConfidentialMutation,
@@ -1253,7 +1255,7 @@ describe('Actions Notes Store', () => {
const commitSpy = jest.fn();
return actions
- .updateConfidentialityOnIssue({ commit: commitSpy, state, getters }, actionArgs)
+ .updateConfidentialityOnIssuable({ commit: commitSpy, state, getters }, actionArgs)
.then(() => {
expect(commitSpy).toHaveBeenCalledWith(
mutationTypes.SET_ISSUE_CONFIDENTIAL,
@@ -1263,4 +1265,75 @@ describe('Actions Notes Store', () => {
});
});
});
+
+ describe.each`
+ issuableType
+ ${'issue'} | ${'merge_request'}
+ `('updateLockedAttribute for issuableType=$issuableType', ({ issuableType }) => {
+ // Payload for mutation query
+ state = { noteableData: { discussion_locked: false } };
+ const targetType = issuableType;
+ const getters = { getNoteableData: { iid: '1', targetType } };
+
+ // Target state after mutation
+ const locked = true;
+ const actionArgs = { fullPath: 'full/path', locked };
+ const input = { iid: '1', projectPath: 'full/path', locked: true };
+
+ // Helper functions
+ const targetMutation = () => {
+ return targetType === 'issue' ? updateIssueLockMutation : updateMergeRequestLockMutation;
+ };
+
+ const mockResolvedValue = () => {
+ return targetType === 'issue'
+ ? { data: { issueSetLocked: { issue: { discussionLocked: locked } } } }
+ : { data: { mergeRequestSetLocked: { mergeRequest: { discussionLocked: locked } } } };
+ };
+
+ beforeEach(() => {
+ jest.spyOn(utils.gqClient, 'mutate').mockResolvedValue(mockResolvedValue());
+ });
+
+ it('calls gqClient mutation one time', () => {
+ actions.updateLockedAttribute({ commit: () => {}, state, getters }, actionArgs);
+
+ expect(utils.gqClient.mutate).toHaveBeenCalledTimes(1);
+ });
+
+ it('calls gqClient mutation with the correct values', () => {
+ actions.updateLockedAttribute({ commit: () => {}, state, getters }, actionArgs);
+
+ expect(utils.gqClient.mutate).toHaveBeenCalledWith({
+ mutation: targetMutation(),
+ variables: { input },
+ });
+ });
+
+ describe('on success of mutation', () => {
+ it('calls commit with the correct values', () => {
+ const commitSpy = jest.fn();
+
+ return actions
+ .updateLockedAttribute({ commit: commitSpy, state, getters }, actionArgs)
+ .then(() => {
+ expect(commitSpy).toHaveBeenCalledWith(mutationTypes.SET_ISSUABLE_LOCK, locked);
+ });
+ });
+ });
+ });
+
+ describe('updateDiscussionPosition', () => {
+    it('updates the discussion position', done => {
+ const updatedPosition = { discussionId: 1, position: { test: true } };
+ testAction(
+ actions.updateDiscussionPosition,
+ updatedPosition,
+ { state: { discussions: [] } },
+ [{ type: mutationTypes.UPDATE_DISCUSSION_POSITION, payload: updatedPosition }],
+ [],
+ done,
+ );
+ });
+ });
});
diff --git a/spec/frontend/notes/stores/mutation_spec.js b/spec/frontend/notes/stores/mutation_spec.js
index 0ad18ba9b6a..b953bffc4fe 100644
--- a/spec/frontend/notes/stores/mutation_spec.js
+++ b/spec/frontend/notes/stores/mutation_spec.js
@@ -833,13 +833,27 @@ describe('Notes Store mutations', () => {
state = { noteableData: { confidential: false } };
});
- it('sets sort order', () => {
+ it('should set issuable as confidential', () => {
mutations.SET_ISSUE_CONFIDENTIAL(state, true);
expect(state.noteableData.confidential).toBe(true);
});
});
+ describe('SET_ISSUABLE_LOCK', () => {
+ let state;
+
+ beforeEach(() => {
+ state = { noteableData: { discussion_locked: false } };
+ });
+
+ it('should set issuable as locked', () => {
+ mutations.SET_ISSUABLE_LOCK(state, true);
+
+ expect(state.noteableData.discussion_locked).toBe(true);
+ });
+ });
+
describe('UPDATE_ASSIGNEES', () => {
it('should update assignees', () => {
const state = {
@@ -851,4 +865,20 @@ describe('Notes Store mutations', () => {
expect(state.noteableData.assignees).toEqual([userDataMock.id]);
});
});
+
+ describe('UPDATE_DISCUSSION_POSITION', () => {
+    it('should update the discussion position', () => {
+ const discussion1 = { id: 1, position: { line_code: 'abc_1_1' } };
+ const discussion2 = { id: 2, position: { line_code: 'abc_2_2' } };
+ const discussion3 = { id: 3, position: { line_code: 'abc_3_3' } };
+ const state = {
+ discussions: [discussion1, discussion2, discussion3],
+ };
+ const discussion1Position = { ...discussion1.position };
+ const position = { ...discussion1Position, test: true };
+
+ mutations.UPDATE_DISCUSSION_POSITION(state, { discussionId: discussion1.id, position });
+ expect(state.discussions[0].position).toEqual(position);
+ });
+ });
});
diff --git a/spec/frontend/onboarding_issues/index_spec.js b/spec/frontend/onboarding_issues/index_spec.js
index b844caa07aa..d476ba1cf5a 100644
--- a/spec/frontend/onboarding_issues/index_spec.js
+++ b/spec/frontend/onboarding_issues/index_spec.js
@@ -1,7 +1,7 @@
import $ from 'jquery';
+import setWindowLocation from 'helpers/set_window_location_helper';
import { showLearnGitLabIssuesPopover } from '~/onboarding_issues';
import { getCookie, setCookie, removeCookie } from '~/lib/utils/common_utils';
-import setWindowLocation from 'helpers/set_window_location_helper';
import Tracking from '~/tracking';
describe('Onboarding Issues Popovers', () => {
diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js
index 398b61ec693..c7ea23f9913 100644
--- a/spec/frontend/operation_settings/components/metrics_settings_spec.js
+++ b/spec/frontend/operation_settings/components/metrics_settings_spec.js
@@ -1,5 +1,5 @@
import { mount, shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton, GlLink, GlFormGroup, GlFormInput, GlFormSelect } from '@gitlab/ui';
+import { GlButton, GlLink, GlFormGroup, GlFormInput, GlFormSelect } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import MetricsSettings from '~/operation_settings/components/metrics_settings.vue';
@@ -9,7 +9,7 @@ import { timezones } from '~/monitoring/format_date';
import store from '~/operation_settings/store';
import axios from '~/lib/utils/axios_utils';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/flash');
@@ -56,12 +56,12 @@ describe('operation settings external dashboard component', () => {
it('renders header text', () => {
mountComponent();
- expect(wrapper.find('.js-section-header').text()).toBe('Metrics Dashboard');
+ expect(wrapper.find('.js-section-header').text()).toBe('Metrics dashboard');
});
describe('expand/collapse button', () => {
it('renders as an expand button by default', () => {
- const button = wrapper.find(GlDeprecatedButton);
+ const button = wrapper.find(GlButton);
expect(button.text()).toBe('Expand');
});
@@ -160,8 +160,7 @@ describe('operation settings external dashboard component', () => {
});
describe('submit button', () => {
- const findSubmitButton = () =>
- wrapper.find('.settings-content form').find(GlDeprecatedButton);
+ const findSubmitButton = () => wrapper.find('.settings-content form').find(GlButton);
const endpointRequest = [
operationsSettingsEndpoint,
diff --git a/spec/frontend/packages/details/components/__snapshots__/code_instruction_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/code_instruction_spec.js.snap
new file mode 100644
index 00000000000..172b8919673
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/code_instruction_spec.js.snap
@@ -0,0 +1,46 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Package code instruction multiline to match the snapshot 1`] = `
+<div>
+ <pre
+ class="js-instruction-pre"
+ >
+ this is some
+multiline text
+ </pre>
+</div>
+`;
+
+exports[`Package code instruction single line to match the default snapshot 1`] = `
+<div
+ class="input-group gl-mb-3"
+>
+ <input
+ class="form-control monospace js-instruction-input"
+ readonly="readonly"
+ type="text"
+ />
+
+ <span
+ class="input-group-append js-instruction-button"
+ >
+ <button
+ class="btn input-group-text btn-secondary btn-md btn-default"
+ data-clipboard-text="npm i @my-package"
+ title="Copy npm install command"
+ type="button"
+ >
+ <!---->
+
+ <svg
+ class="gl-icon s16"
+ data-testid="copy-to-clipboard-icon"
+ >
+ <use
+ href="#copy-to-clipboard"
+ />
+ </svg>
+ </button>
+ </span>
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap
new file mode 100644
index 00000000000..852292e084b
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/conan_installation_spec.js.snap
@@ -0,0 +1,49 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`ConanInstallation renders all the messages 1`] = `
+<div>
+ <h3
+ class="gl-font-lg"
+ >
+ Installation
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+
+ Conan Command
+
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy Conan Command"
+ instruction="foo/command"
+ trackingaction="copy_conan_command"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+
+ Add Conan Remote
+
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy Conan Setup Command"
+ instruction="foo/setup"
+ trackingaction="copy_conan_setup_command"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the Conan registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap
new file mode 100644
index 00000000000..28b7ca442eb
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/dependency_row_spec.js.snap
@@ -0,0 +1,34 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`DependencyRow renders full dependency 1`] = `
+<div
+ class="gl-responsive-table-row"
+>
+ <div
+ class="table-section section-50"
+ >
+ <strong
+ class="gl-text-body"
+ >
+ Test.Dependency
+ </strong>
+
+ <span
+ data-testid="target-framework"
+ >
+ (.NETStandard2.0)
+ </span>
+ </div>
+
+ <div
+ class="table-section section-50 gl-display-flex justify-content-md-end"
+ data-testid="version-pattern"
+ >
+ <span
+ class="gl-text-body"
+ >
+ 2.3.7
+ </span>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/history_element_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/history_element_spec.js.snap
new file mode 100644
index 00000000000..a1751d69c70
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/history_element_spec.js.snap
@@ -0,0 +1,38 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`History Element renders the correct markup 1`] = `
+<li
+ class="timeline-entry system-note note-wrapper gl-mb-6!"
+>
+ <div
+ class="timeline-entry-inner"
+ >
+ <div
+ class="timeline-icon"
+ >
+ <gl-icon-stub
+ name="pencil"
+ size="16"
+ />
+ </div>
+
+ <div
+ class="timeline-content"
+ >
+ <div
+ class="note-header"
+ >
+ <span>
+ <div
+ data-testid="default-slot"
+ />
+ </span>
+ </div>
+
+ <div
+ class="note-body"
+ />
+ </div>
+ </div>
+</li>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
new file mode 100644
index 00000000000..10e54500797
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/maven_installation_spec.js.snap
@@ -0,0 +1,69 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MavenInstallation renders all the messages 1`] = `
+<div>
+ <h3
+ class="gl-font-lg"
+ >
+ Installation
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+
+ Maven XML
+
+ </h4>
+
+ <p>
+ <gl-sprintf-stub
+ message="Copy and paste this inside your %{codeStart}pom.xml%{codeEnd} %{codeStart}dependencies%{codeEnd} block."
+ />
+ </p>
+
+ <code-instruction-stub
+ copytext="Copy Maven XML"
+ instruction="foo/xml"
+ multiline="true"
+ trackingaction="copy_maven_xml"
+ />
+
+ <h4
+ class="gl-font-base"
+ >
+
+ Maven Command
+
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy Maven command"
+ instruction="foo/command"
+ trackingaction="copy_maven_command"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <p>
+ <gl-sprintf-stub
+ message="If you haven't already done so, you will need to add the below to your %{codeStart}pom.xml%{codeEnd} file."
+ />
+ </p>
+
+ <code-instruction-stub
+ copytext="Copy Maven registry XML"
+ instruction="foo/setup"
+ multiline="true"
+ trackingaction="copy_maven_setup_xml"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the Maven registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
new file mode 100644
index 00000000000..58a509e6847
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/npm_installation_spec.js.snap
@@ -0,0 +1,69 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`NpmInstallation renders all the messages 1`] = `
+<div>
+ <h3
+ class="gl-font-lg"
+ >
+ Installation
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+ npm command
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy npm command"
+ instruction="npm i @Test/package"
+ trackingaction="copy_npm_install_command"
+ />
+
+ <h4
+ class="gl-font-base"
+ >
+ yarn command
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy yarn command"
+ instruction="yarn add @Test/package"
+ trackingaction="copy_yarn_install_command"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+ npm command
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy npm setup command"
+ instruction="echo @Test:registry=undefined >> .npmrc"
+ trackingaction="copy_npm_setup_command"
+ />
+
+ <h4
+ class="gl-font-base"
+ >
+ yarn command
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy yarn setup command"
+ instruction="echo \\\\\\"@Test:registry\\\\\\" \\\\\\"undefined\\\\\\" >> .yarnrc"
+ trackingaction="copy_yarn_setup_command"
+ />
+
+ <gl-sprintf-stub
+ message="You may also need to setup authentication using an auth token. %{linkStart}See the documentation%{linkEnd} to find out more."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap
new file mode 100644
index 00000000000..67810290c62
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/nuget_installation_spec.js.snap
@@ -0,0 +1,49 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`NugetInstallation renders all the messages 1`] = `
+<div>
+ <h3
+ class="gl-font-lg"
+ >
+ Installation
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+
+ NuGet Command
+
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy NuGet Command"
+ instruction="foo/command"
+ trackingaction="copy_nuget_install_command"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+
+ Add NuGet Source
+
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy NuGet Setup Command"
+ instruction="foo/setup"
+ trackingaction="copy_nuget_setup_command"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the NuGet registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
new file mode 100644
index 00000000000..bdcd4a9e077
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/package_title_spec.js.snap
@@ -0,0 +1,172 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PackageTitle renders with tags 1`] = `
+<div
+ class="gl-flex-direction-column"
+>
+ <div
+ class="gl-display-flex"
+ >
+ <!---->
+
+ <div
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ >
+
+ Test package
+
+ </h1>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <gl-sprintf-stub
+ message="v%{version} published %{timeAgo}"
+ />
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mb-3"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <gl-icon-stub
+ class="gl-text-gray-500 gl-mr-3"
+ name="package"
+ size="16"
+ />
+
+ <span
+ class="gl-font-weight-bold"
+ data-testid="package-type"
+ >
+ maven
+ </span>
+ </div>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <package-tags-stub
+ tagdisplaylimit="1"
+ tags="[object Object],[object Object],[object Object],[object Object]"
+ />
+ </div>
+
+ <!---->
+
+ <!---->
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <gl-icon-stub
+ class="gl-text-gray-500 gl-mr-3"
+ name="disk"
+ size="16"
+ />
+
+ <span
+ class="gl-font-weight-bold"
+ data-testid="package-size"
+ >
+ 300 bytes
+ </span>
+ </div>
+ </div>
+</div>
+`;
+
+exports[`PackageTitle renders without tags 1`] = `
+<div
+ class="gl-flex-direction-column"
+>
+ <div
+ class="gl-display-flex"
+ >
+ <!---->
+
+ <div
+ class="gl-display-flex gl-flex-direction-column"
+ >
+ <h1
+ class="gl-font-size-h1 gl-mt-3 gl-mb-2"
+ >
+
+ Test package
+
+ </h1>
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-text-gray-500"
+ >
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="eye"
+ size="16"
+ />
+
+ <gl-sprintf-stub
+ message="v%{version} published %{timeAgo}"
+ />
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="gl-display-flex gl-flex-wrap gl-align-items-center gl-mb-3"
+ >
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <gl-icon-stub
+ class="gl-text-gray-500 gl-mr-3"
+ name="package"
+ size="16"
+ />
+
+ <span
+ class="gl-font-weight-bold"
+ data-testid="package-type"
+ >
+ maven
+ </span>
+ </div>
+
+ <!---->
+
+ <!---->
+
+ <!---->
+
+ <div
+ class="gl-display-flex gl-align-items-center gl-mr-5"
+ >
+ <gl-icon-stub
+ class="gl-text-gray-500 gl-mr-3"
+ name="disk"
+ size="16"
+ />
+
+ <span
+ class="gl-font-weight-bold"
+ data-testid="package-size"
+ >
+ 300 bytes
+ </span>
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap
new file mode 100644
index 00000000000..5c1e74d73af
--- /dev/null
+++ b/spec/frontend/packages/details/components/__snapshots__/pypi_installation_spec.js.snap
@@ -0,0 +1,50 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`PypiInstallation renders all the messages 1`] = `
+<div>
+ <h3
+ class="gl-font-lg"
+ >
+ Installation
+ </h3>
+
+ <h4
+ class="gl-font-base"
+ >
+
+ Pip Command
+
+ </h4>
+
+ <code-instruction-stub
+ copytext="Copy Pip command"
+ data-testid="pip-command"
+ instruction="pip install"
+ trackingaction="copy_pip_install_command"
+ />
+
+ <h3
+ class="gl-font-lg"
+ >
+ Registry setup
+ </h3>
+
+ <p>
+ <gl-sprintf-stub
+ message="If you haven't already done so, you will need to add the below to your %{codeStart}.pypirc%{codeEnd} file."
+ />
+ </p>
+
+ <code-instruction-stub
+ copytext="Copy .pypirc content"
+ data-testid="pypi-setup-content"
+ instruction="python setup"
+ multiline="true"
+ trackingaction="copy_pypi_setup_command"
+ />
+
+ <gl-sprintf-stub
+ message="For more information on the PyPi registry, %{linkStart}see the documentation%{linkEnd}."
+ />
+</div>
+`;
diff --git a/spec/frontend/packages/details/components/additional_metadata_spec.js b/spec/frontend/packages/details/components/additional_metadata_spec.js
new file mode 100644
index 00000000000..b2337b86740
--- /dev/null
+++ b/spec/frontend/packages/details/components/additional_metadata_spec.js
@@ -0,0 +1,119 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import DetailsRow from '~/registry/shared/components/details_row.vue';
+import component from '~/packages/details/components/additional_metadata.vue';
+
+import { mavenPackage, conanPackage, nugetPackage, npmPackage } from '../../mock_data';
+
+describe('Package Additional Metadata', () => {
+ let wrapper;
+ const defaultProps = {
+ packageEntity: { ...mavenPackage },
+ };
+
+ const mountComponent = props => {
+ wrapper = shallowMount(component, {
+ propsData: { ...defaultProps, ...props },
+ stubs: {
+ DetailsRow,
+ GlSprintf,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findTitle = () => wrapper.find('[data-testid="title"]');
+ const findMainArea = () => wrapper.find('[data-testid="main"]');
+ const findNugetSource = () => wrapper.find('[data-testid="nuget-source"]');
+ const findNugetLicense = () => wrapper.find('[data-testid="nuget-license"]');
+ const findConanRecipe = () => wrapper.find('[data-testid="conan-recipe"]');
+ const findMavenApp = () => wrapper.find('[data-testid="maven-app"]');
+ const findMavenGroup = () => wrapper.find('[data-testid="maven-group"]');
+ const findElementLink = container => container.find(GlLink);
+
+ it('has the correct title', () => {
+ mountComponent();
+
+ const title = findTitle();
+
+ expect(title.exists()).toBe(true);
+ expect(title.text()).toBe('Additional Metadata');
+ });
+
+ describe.each`
+ packageEntity | visible | metadata
+ ${mavenPackage} | ${true} | ${'maven_metadatum'}
+ ${conanPackage} | ${true} | ${'conan_metadatum'}
+ ${nugetPackage} | ${true} | ${'nuget_metadatum'}
+ ${npmPackage} | ${false} | ${null}
+ `('Component visibility', ({ packageEntity, visible, metadata }) => {
+    it(`the component markup visibility is ${visible} when the package is ${packageEntity.package_type}`, () => {
+ mountComponent({ packageEntity });
+
+ expect(findTitle().exists()).toBe(visible);
+ expect(findMainArea().exists()).toBe(visible);
+ });
+
+ it(`The component is hidden if ${metadata} is missing`, () => {
+ mountComponent({ packageEntity: { ...packageEntity, [metadata]: null } });
+
+ expect(findTitle().exists()).toBe(false);
+ expect(findMainArea().exists()).toBe(false);
+ });
+ });
+
+ describe('nuget metadata', () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: nugetPackage });
+ });
+
+ it.each`
+ name | finderFunction | text | link | icon
+ ${'source'} | ${findNugetSource} | ${'Source project located at project-foo-url'} | ${'project_url'} | ${'project'}
+ ${'license'} | ${findNugetLicense} | ${'License information located at license-foo-url'} | ${'license_url'} | ${'license'}
+ `('$name element', ({ finderFunction, text, link, icon }) => {
+ const element = finderFunction();
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ expect(findElementLink(element).attributes('href')).toBe(nugetPackage.nuget_metadatum[link]);
+ });
+ });
+
+ describe('conan metadata', () => {
+ beforeEach(() => {
+ mountComponent({ packageEntity: conanPackage });
+ });
+
+ it.each`
+ name | finderFunction | text | icon
+ ${'recipe'} | ${findConanRecipe} | ${'Recipe: conan-package/1.0.0@conan+conan-package/stable'} | ${'information-o'}
+ `('$name element', ({ finderFunction, text, icon }) => {
+ const element = finderFunction();
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ });
+ });
+
+ describe('maven metadata', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it.each`
+ name | finderFunction | text | icon
+ ${'app'} | ${findMavenApp} | ${'App name: test-app'} | ${'information-o'}
+ ${'group'} | ${findMavenGroup} | ${'App group: com.test.app'} | ${'information-o'}
+ `('$name element', ({ finderFunction, text, icon }) => {
+ const element = finderFunction();
+ expect(element.exists()).toBe(true);
+ expect(element.text()).toBe(text);
+ expect(element.props('icon')).toBe(icon);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/app_spec.js b/spec/frontend/packages/details/components/app_spec.js
new file mode 100644
index 00000000000..f535f3f5744
--- /dev/null
+++ b/spec/frontend/packages/details/components/app_spec.js
@@ -0,0 +1,281 @@
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
+import { GlEmptyState, GlModal } from '@gitlab/ui';
+import stubChildren from 'helpers/stub_children';
+import Tracking from '~/tracking';
+import * as getters from '~/packages/details/store/getters';
+import PackagesApp from '~/packages/details/components/app.vue';
+import PackageTitle from '~/packages/details/components/package_title.vue';
+
+import * as SharedUtils from '~/packages/shared/utils';
+import { TrackingActions } from '~/packages/shared/constants';
+import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue';
+import PackageListRow from '~/packages/shared/components/package_list_row.vue';
+
+import DependencyRow from '~/packages/details/components/dependency_row.vue';
+import PackageHistory from '~/packages/details/components/package_history.vue';
+import AdditionalMetadata from '~/packages/details/components/additional_metadata.vue';
+import InstallationCommands from '~/packages/details/components/installation_commands.vue';
+
+import {
+ composerPackage,
+ conanPackage,
+ mavenPackage,
+ mavenFiles,
+ npmPackage,
+ npmFiles,
+ nugetPackage,
+} from '../../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('PackagesApp', () => {
+ let wrapper;
+ let store;
+ const fetchPackageVersions = jest.fn();
+
+ function createComponent({
+ packageEntity = mavenPackage,
+ packageFiles = mavenFiles,
+ isLoading = false,
+ oneColumnView = false,
+ } = {}) {
+ store = new Vuex.Store({
+ state: {
+ isLoading,
+ packageEntity,
+ packageFiles,
+ canDelete: true,
+ destroyPath: 'destroy-package-path',
+ emptySvgPath: 'empty-illustration',
+ npmPath: 'foo',
+ npmHelpPath: 'foo',
+ projectName: 'bar',
+ oneColumnView,
+ },
+ actions: {
+ fetchPackageVersions,
+ },
+ getters,
+ });
+
+ wrapper = mount(PackagesApp, {
+ localVue,
+ store,
+ stubs: {
+ ...stubChildren(PackagesApp),
+ GlButton: false,
+ GlModal: false,
+ GlTab: false,
+ GlTabs: false,
+ GlTable: false,
+ },
+ });
+ }
+
+ const packageTitle = () => wrapper.find(PackageTitle);
+ const emptyState = () => wrapper.find(GlEmptyState);
+ const allFileRows = () => wrapper.findAll('.js-file-row');
+ const firstFileDownloadLink = () => wrapper.find('.js-file-download');
+ const deleteButton = () => wrapper.find('.js-delete-button');
+ const deleteModal = () => wrapper.find(GlModal);
+ const modalDeleteButton = () => wrapper.find({ ref: 'modal-delete-button' });
+ const versionsTab = () => wrapper.find('.js-versions-tab > a');
+ const packagesLoader = () => wrapper.find(PackagesListLoader);
+ const packagesVersionRows = () => wrapper.findAll(PackageListRow);
+ const noVersionsMessage = () => wrapper.find('[data-testid="no-versions-message"]');
+ const dependenciesTab = () => wrapper.find('.js-dependencies-tab > a');
+ const dependenciesCountBadge = () => wrapper.find('[data-testid="dependencies-badge"]');
+ const noDependenciesMessage = () => wrapper.find('[data-testid="no-dependencies-message"]');
+ const dependencyRows = () => wrapper.findAll(DependencyRow);
+ const findPackageHistory = () => wrapper.find(PackageHistory);
+ const findAdditionalMetadata = () => wrapper.find(AdditionalMetadata);
+ const findInstallationCommands = () => wrapper.find(InstallationCommands);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the app and displays the package title', () => {
+ createComponent();
+
+ expect(packageTitle()).toExist();
+ });
+
+  it('renders an empty state component when an invalid package is passed as a prop', () => {
+ createComponent({
+ packageEntity: {},
+ });
+
+ expect(emptyState()).toExist();
+ });
+
+ it('package history has the right props', () => {
+ createComponent();
+ expect(findPackageHistory().exists()).toBe(true);
+ expect(findPackageHistory().props('packageEntity')).toEqual(wrapper.vm.packageEntity);
+ expect(findPackageHistory().props('projectName')).toEqual(wrapper.vm.projectName);
+ });
+
+ it('additional metadata has the right props', () => {
+ createComponent();
+ expect(findAdditionalMetadata().exists()).toBe(true);
+ expect(findAdditionalMetadata().props('packageEntity')).toEqual(wrapper.vm.packageEntity);
+ });
+
+ it('installation commands has the right props', () => {
+ createComponent();
+ expect(findInstallationCommands().exists()).toBe(true);
+ expect(findInstallationCommands().props('packageEntity')).toEqual(wrapper.vm.packageEntity);
+ });
+
+ it('hides the files table if package type is COMPOSER', () => {
+ createComponent({ packageEntity: composerPackage });
+ expect(allFileRows().exists()).toBe(false);
+ });
+
+  it('renders a single file for an npm package as it only contains one file', () => {
+ createComponent({ packageEntity: npmPackage, packageFiles: npmFiles });
+
+ expect(allFileRows()).toExist();
+ expect(allFileRows()).toHaveLength(1);
+ });
+
+ it('renders multiple files for a package that contains more than one file', () => {
+ createComponent();
+
+ expect(allFileRows()).toExist();
+ expect(allFileRows()).toHaveLength(2);
+ });
+
+ it('allows the user to download a package file by rendering a download link', () => {
+ createComponent();
+
+ expect(allFileRows()).toExist();
+ expect(firstFileDownloadLink().vm.$attrs.href).toContain('download');
+ });
+
+ describe('deleting packages', () => {
+ beforeEach(() => {
+ createComponent();
+ deleteButton().trigger('click');
+ });
+
+ it('shows the delete confirmation modal when delete is clicked', () => {
+ expect(deleteModal()).toExist();
+ });
+ });
+
+ describe('versions', () => {
+ describe('api call', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('makes api request on first click of tab', () => {
+ versionsTab().trigger('click');
+
+ expect(fetchPackageVersions).toHaveBeenCalled();
+ });
+ });
+
+ it('displays the loader when state is loading', () => {
+ createComponent({ isLoading: true });
+
+ expect(packagesLoader().exists()).toBe(true);
+ });
+
+ it('displays the correct version count when the package has versions', () => {
+ createComponent({ packageEntity: npmPackage });
+
+ expect(packagesVersionRows()).toHaveLength(npmPackage.versions.length);
+ });
+
+ it('displays the no versions message when there are none', () => {
+ createComponent();
+
+ expect(noVersionsMessage().exists()).toBe(true);
+ });
+ });
+
+ describe('dependency links', () => {
+ it('does not show the dependency links for a non nuget package', () => {
+ createComponent();
+
+ expect(dependenciesTab().exists()).toBe(false);
+ });
+
+    it('shows the dependencies tab with a count of 0 when a nuget package has no dependencies', () => {
+ createComponent({
+ packageEntity: {
+ ...nugetPackage,
+ dependency_links: [],
+ },
+ });
+
+ return wrapper.vm.$nextTick(() => {
+ const dependenciesBadge = dependenciesCountBadge();
+
+ expect(dependenciesTab().exists()).toBe(true);
+ expect(dependenciesBadge.exists()).toBe(true);
+ expect(dependenciesBadge.text()).toBe('0');
+ expect(noDependenciesMessage().exists()).toBe(true);
+ });
+ });
+
+ it('renders the correct number of dependency rows for a nuget package', () => {
+ createComponent({ packageEntity: nugetPackage });
+
+ return wrapper.vm.$nextTick(() => {
+ const dependenciesBadge = dependenciesCountBadge();
+
+ expect(dependenciesTab().exists()).toBe(true);
+ expect(dependenciesBadge.exists()).toBe(true);
+ expect(dependenciesBadge.text()).toBe(nugetPackage.dependency_links.length.toString());
+ expect(dependencyRows()).toHaveLength(nugetPackage.dependency_links.length);
+ });
+ });
+ });
+
+ describe('tracking', () => {
+ let eventSpy;
+ let utilSpy;
+ const category = 'foo';
+
+ beforeEach(() => {
+ eventSpy = jest.spyOn(Tracking, 'event');
+ utilSpy = jest.spyOn(SharedUtils, 'packageTypeToTrackCategory').mockReturnValue(category);
+ });
+
+ it('tracking category calls packageTypeToTrackCategory', () => {
+ createComponent({ packageEntity: conanPackage });
+ expect(wrapper.vm.tracking.category).toBe(category);
+ expect(utilSpy).toHaveBeenCalledWith('conan');
+ });
+
+    it(`delete button on delete modal calls event with ${TrackingActions.DELETE_PACKAGE}`, () => {
+ createComponent({ packageEntity: conanPackage });
+ deleteButton().trigger('click');
+ return wrapper.vm.$nextTick().then(() => {
+ modalDeleteButton().trigger('click');
+ expect(eventSpy).toHaveBeenCalledWith(
+ category,
+ TrackingActions.DELETE_PACKAGE,
+ expect.any(Object),
+ );
+ });
+ });
+
+    it(`file download link calls event with ${TrackingActions.PULL_PACKAGE}`, () => {
+ createComponent({ packageEntity: conanPackage });
+
+ firstFileDownloadLink().vm.$emit('click');
+ expect(eventSpy).toHaveBeenCalledWith(
+ category,
+ TrackingActions.PULL_PACKAGE,
+ expect.any(Object),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/code_instruction_spec.js b/spec/frontend/packages/details/components/code_instruction_spec.js
new file mode 100644
index 00000000000..724eddb9070
--- /dev/null
+++ b/spec/frontend/packages/details/components/code_instruction_spec.js
@@ -0,0 +1,110 @@
+import { mount } from '@vue/test-utils';
+import CodeInstruction from '~/packages/details/components/code_instruction.vue';
+import { TrackingLabels } from '~/packages/details/constants';
+import Tracking from '~/tracking';
+
+describe('Package code instruction', () => {
+ let wrapper;
+
+ const defaultProps = {
+ instruction: 'npm i @my-package',
+ copyText: 'Copy npm install command',
+ };
+
+ function createComponent(props = {}) {
+ wrapper = mount(CodeInstruction, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ }
+
+ const findInstructionInput = () => wrapper.find('.js-instruction-input');
+ const findInstructionPre = () => wrapper.find('.js-instruction-pre');
+ const findInstructionButton = () => wrapper.find('.js-instruction-button');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('single line', () => {
+ beforeEach(() => createComponent());
+
+ it('to match the default snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ describe('multiline', () => {
+ beforeEach(() =>
+ createComponent({
+ instruction: 'this is some\nmultiline text',
+ copyText: 'Copy the command',
+ multiline: true,
+ }),
+ );
+
+ it('to match the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ describe('tracking', () => {
+ let eventSpy;
+ const trackingAction = 'test_action';
+ const label = TrackingLabels.CODE_INSTRUCTION;
+
+ beforeEach(() => {
+ eventSpy = jest.spyOn(Tracking, 'event');
+ });
+
+ it('should not track when no trackingAction is provided', () => {
+ createComponent();
+ findInstructionButton().trigger('click');
+
+ expect(eventSpy).toHaveBeenCalledTimes(0);
+ });
+
+ describe('when trackingAction is provided for single line', () => {
+ beforeEach(() =>
+ createComponent({
+ trackingAction,
+ }),
+ );
+
+ it('should track when copying from the input', () => {
+ findInstructionInput().trigger('copy');
+
+ expect(eventSpy).toHaveBeenCalledWith(undefined, trackingAction, {
+ label,
+ });
+ });
+
+ it('should track when the copy button is pressed', () => {
+ findInstructionButton().trigger('click');
+
+ expect(eventSpy).toHaveBeenCalledWith(undefined, trackingAction, {
+ label,
+ });
+ });
+ });
+
+ describe('when trackingAction is provided for multiline', () => {
+ beforeEach(() =>
+ createComponent({
+ trackingAction,
+ multiline: true,
+ }),
+ );
+
+ it('should track when copying from the multiline pre element', () => {
+ findInstructionPre().trigger('copy');
+
+ expect(eventSpy).toHaveBeenCalledWith(undefined, trackingAction, {
+ label,
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/composer_installation_spec.js b/spec/frontend/packages/details/components/composer_installation_spec.js
new file mode 100644
index 00000000000..7679d721391
--- /dev/null
+++ b/spec/frontend/packages/details/components/composer_installation_spec.js
@@ -0,0 +1,95 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlSprintf, GlLink } from '@gitlab/ui';
+import { registryUrl as composerHelpPath } from 'jest/packages/details/mock_data';
+import { composerPackage as packageEntity } from 'jest/packages/mock_data';
+import ComposerInstallation from '~/packages/details/components/composer_installation.vue';
+import CodeInstructions from '~/packages/details/components/code_instruction.vue';
+import { TrackingActions } from '~/packages/details/constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ComposerInstallation', () => {
+ let wrapper;
+
+ const composerRegistryIncludeStr = 'foo/registry';
+ const composerPackageIncludeStr = 'foo/package';
+
+ const store = new Vuex.Store({
+ state: {
+ packageEntity,
+ composerHelpPath,
+ },
+ getters: {
+ composerRegistryInclude: () => composerRegistryIncludeStr,
+ composerPackageInclude: () => composerPackageIncludeStr,
+ },
+ });
+
+ const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
+ const findRegistryIncludeTitle = () => wrapper.find('[data-testid="registry-include-title"]');
+ const findPackageIncludeTitle = () => wrapper.find('[data-testid="package-include-title"]');
+ const findHelpText = () => wrapper.find('[data-testid="help-text"]');
+ const findHelpLink = () => wrapper.find(GlLink);
+
+ function createComponent() {
+ wrapper = shallowMount(ComposerInstallation, {
+ localVue,
+ store,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('registry include command', () => {
+ it('uses code_instructions', () => {
+ const registryIncludeCommand = findCodeInstructions().at(0);
+ expect(registryIncludeCommand.exists()).toBe(true);
+ expect(registryIncludeCommand.props()).toMatchObject({
+ instruction: composerRegistryIncludeStr,
+ copyText: 'Copy registry include',
+ trackingAction: TrackingActions.COPY_COMPOSER_REGISTRY_INCLUDE_COMMAND,
+ });
+ });
+
+ it('has the correct title', () => {
+ expect(findRegistryIncludeTitle().text()).toBe('composer.json registry include');
+ });
+ });
+
+ describe('package include command', () => {
+ it('uses code_instructions', () => {
+ const registryIncludeCommand = findCodeInstructions().at(1);
+ expect(registryIncludeCommand.exists()).toBe(true);
+ expect(registryIncludeCommand.props()).toMatchObject({
+ instruction: composerPackageIncludeStr,
+ copyText: 'Copy require package include',
+ trackingAction: TrackingActions.COPY_COMPOSER_PACKAGE_INCLUDE_COMMAND,
+ });
+ });
+
+ it('has the correct title', () => {
+ expect(findPackageIncludeTitle().text()).toBe('composer.json require package include');
+ });
+
+ it('has the correct help text', () => {
+ expect(findHelpText().text()).toBe(
+ 'For more information on Composer packages in GitLab, see the documentation.',
+ );
+ expect(findHelpLink().attributes()).toMatchObject({
+ href: composerHelpPath,
+ target: '_blank',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/conan_installation_spec.js b/spec/frontend/packages/details/components/conan_installation_spec.js
new file mode 100644
index 00000000000..5b31e38dad5
--- /dev/null
+++ b/spec/frontend/packages/details/components/conan_installation_spec.js
@@ -0,0 +1,68 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import ConanInstallation from '~/packages/details/components/conan_installation.vue';
+import CodeInstructions from '~/packages/details/components/code_instruction.vue';
+import { conanPackage as packageEntity } from '../../mock_data';
+import { registryUrl as conanPath } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('ConanInstallation', () => {
+ let wrapper;
+
+ const conanInstallationCommandStr = 'foo/command';
+ const conanSetupCommandStr = 'foo/setup';
+
+ const store = new Vuex.Store({
+ state: {
+ packageEntity,
+ conanPath,
+ },
+ getters: {
+ conanInstallationCommand: () => conanInstallationCommandStr,
+ conanSetupCommand: () => conanSetupCommandStr,
+ },
+ });
+
+ const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
+
+ function createComponent() {
+ wrapper = shallowMount(ConanInstallation, {
+ localVue,
+ store,
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct command', () => {
+ expect(
+ findCodeInstructions()
+ .at(0)
+ .props('instruction'),
+ ).toBe(conanInstallationCommandStr);
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct command', () => {
+ expect(
+ findCodeInstructions()
+ .at(1)
+ .props('instruction'),
+ ).toBe(conanSetupCommandStr);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/dependency_row_spec.js b/spec/frontend/packages/details/components/dependency_row_spec.js
new file mode 100644
index 00000000000..7d3ee92908d
--- /dev/null
+++ b/spec/frontend/packages/details/components/dependency_row_spec.js
@@ -0,0 +1,62 @@
+import { shallowMount } from '@vue/test-utils';
+import DependencyRow from '~/packages/details/components/dependency_row.vue';
+import { dependencyLinks } from '../../mock_data';
+
+describe('DependencyRow', () => {
+ let wrapper;
+
+ const { withoutFramework, withoutVersion, fullLink } = dependencyLinks;
+
+ function createComponent({ dependencyLink = fullLink } = {}) {
+ wrapper = shallowMount(DependencyRow, {
+ propsData: {
+ dependency: dependencyLink,
+ },
+ });
+ }
+
+ const dependencyVersion = () => wrapper.find('[data-testid="version-pattern"]');
+ const dependencyFramework = () => wrapper.find('[data-testid="target-framework"]');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('renders', () => {
+ it('full dependency', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ describe('version', () => {
+ it('does not render any version information when not supplied', () => {
+ createComponent({ dependencyLink: withoutVersion });
+
+ expect(dependencyVersion().exists()).toBe(false);
+ });
+
+ it('does render version info when it exists', () => {
+ createComponent();
+
+ expect(dependencyVersion().exists()).toBe(true);
+ expect(dependencyVersion().text()).toBe(fullLink.version_pattern);
+ });
+ });
+
+ describe('target framework', () => {
+ it('does not render any framework information when not supplied', () => {
+ createComponent({ dependencyLink: withoutFramework });
+
+ expect(dependencyFramework().exists()).toBe(false);
+ });
+
+ it('does render framework info when it exists', () => {
+ createComponent();
+
+ expect(dependencyFramework().exists()).toBe(true);
+ expect(dependencyFramework().text()).toBe(`(${fullLink.target_framework})`);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/history_element_spec.js b/spec/frontend/packages/details/components/history_element_spec.js
new file mode 100644
index 00000000000..e8746fc93f5
--- /dev/null
+++ b/spec/frontend/packages/details/components/history_element_spec.js
@@ -0,0 +1,57 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import component from '~/packages/details/components/history_element.vue';
+import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
+
+describe('History Element', () => {
+ let wrapper;
+ const defaultProps = {
+ icon: 'pencil',
+ };
+
+ const mountComponent = () => {
+ wrapper = shallowMount(component, {
+ propsData: { ...defaultProps },
+ stubs: {
+ TimelineEntryItem,
+ },
+ slots: {
+ default: '<div data-testid="default-slot"></div>',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findTimelineEntry = () => wrapper.find(TimelineEntryItem);
+ const findGlIcon = () => wrapper.find(GlIcon);
+ const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
+
+ it('renders the correct markup', () => {
+ mountComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('has a default slot', () => {
+ mountComponent();
+
+ expect(findDefaultSlot().exists()).toBe(true);
+ });
+ it('has a timeline entry', () => {
+ mountComponent();
+
+ expect(findTimelineEntry().exists()).toBe(true);
+ });
+ it('has an icon', () => {
+ mountComponent();
+
+ const icon = findGlIcon();
+
+ expect(icon.exists()).toBe(true);
+ expect(icon.attributes('name')).toBe(defaultProps.icon);
+ });
+});
diff --git a/spec/frontend/packages/details/components/installations_commands_spec.js b/spec/frontend/packages/details/components/installations_commands_spec.js
new file mode 100644
index 00000000000..60da34ebcd9
--- /dev/null
+++ b/spec/frontend/packages/details/components/installations_commands_spec.js
@@ -0,0 +1,57 @@
+import { shallowMount } from '@vue/test-utils';
+import InstallationCommands from '~/packages/details/components/installation_commands.vue';
+
+import NpmInstallation from '~/packages/details/components/npm_installation.vue';
+import MavenInstallation from '~/packages/details/components/maven_installation.vue';
+import ConanInstallation from '~/packages/details/components/conan_installation.vue';
+import NugetInstallation from '~/packages/details/components/nuget_installation.vue';
+import PypiInstallation from '~/packages/details/components/pypi_installation.vue';
+import ComposerInstallation from '~/packages/details/components/composer_installation.vue';
+
+import {
+ conanPackage,
+ mavenPackage,
+ npmPackage,
+ nugetPackage,
+ pypiPackage,
+ composerPackage,
+} from '../../mock_data';
+
+describe('InstallationCommands', () => {
+ let wrapper;
+
+ function createComponent(propsData) {
+ wrapper = shallowMount(InstallationCommands, {
+ propsData,
+ });
+ }
+
+ const npmInstallation = () => wrapper.find(NpmInstallation);
+ const mavenInstallation = () => wrapper.find(MavenInstallation);
+ const conanInstallation = () => wrapper.find(ConanInstallation);
+ const nugetInstallation = () => wrapper.find(NugetInstallation);
+ const pypiInstallation = () => wrapper.find(PypiInstallation);
+ const composerInstallation = () => wrapper.find(ComposerInstallation);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('installation instructions', () => {
+ describe.each`
+ packageEntity | selector
+ ${conanPackage} | ${conanInstallation}
+ ${mavenPackage} | ${mavenInstallation}
+ ${npmPackage} | ${npmInstallation}
+ ${nugetPackage} | ${nugetInstallation}
+ ${pypiPackage} | ${pypiInstallation}
+ ${composerPackage} | ${composerInstallation}
+ `('renders', ({ packageEntity, selector }) => {
+ it(`${packageEntity.package_type} instructions exist`, () => {
+ createComponent({ packageEntity });
+
+ expect(selector()).toExist();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/maven_installation_spec.js b/spec/frontend/packages/details/components/maven_installation_spec.js
new file mode 100644
index 00000000000..5d0007294b6
--- /dev/null
+++ b/spec/frontend/packages/details/components/maven_installation_spec.js
@@ -0,0 +1,91 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { registryUrl as mavenPath } from 'jest/packages/details/mock_data';
+import { mavenPackage as packageEntity } from 'jest/packages/mock_data';
+import MavenInstallation from '~/packages/details/components/maven_installation.vue';
+import CodeInstructions from '~/packages/details/components/code_instruction.vue';
+import { TrackingActions } from '~/packages/details/constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('MavenInstallation', () => {
+ let wrapper;
+
+ const xmlCodeBlock = 'foo/xml';
+ const mavenCommandStr = 'foo/command';
+ const mavenSetupXml = 'foo/setup';
+
+ const store = new Vuex.Store({
+ state: {
+ packageEntity,
+ mavenPath,
+ },
+ getters: {
+ mavenInstallationXml: () => xmlCodeBlock,
+ mavenInstallationCommand: () => mavenCommandStr,
+ mavenSetupXml: () => mavenSetupXml,
+ },
+ });
+
+ const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
+
+ function createComponent() {
+ wrapper = shallowMount(MavenInstallation, {
+ localVue,
+ store,
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct xml block', () => {
+ expect(
+ findCodeInstructions()
+ .at(0)
+ .props(),
+ ).toMatchObject({
+ instruction: xmlCodeBlock,
+ multiline: true,
+ trackingAction: TrackingActions.COPY_MAVEN_XML,
+ });
+ });
+
+ it('renders the correct maven command', () => {
+ expect(
+ findCodeInstructions()
+ .at(1)
+ .props(),
+ ).toMatchObject({
+ instruction: mavenCommandStr,
+ multiline: false,
+ trackingAction: TrackingActions.COPY_MAVEN_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct xml block', () => {
+ expect(
+ findCodeInstructions()
+ .at(2)
+ .props(),
+ ).toMatchObject({
+ instruction: mavenSetupXml,
+ multiline: true,
+ trackingAction: TrackingActions.COPY_MAVEN_SETUP,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/npm_installation_spec.js b/spec/frontend/packages/details/components/npm_installation_spec.js
new file mode 100644
index 00000000000..f47bac57a66
--- /dev/null
+++ b/spec/frontend/packages/details/components/npm_installation_spec.js
@@ -0,0 +1,99 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { npmPackage as packageEntity } from 'jest/packages/mock_data';
+import { registryUrl as nugetPath } from 'jest/packages/details/mock_data';
+import NpmInstallation from '~/packages/details/components/npm_installation.vue';
+import CodeInstructions from '~/packages/details/components/code_instruction.vue';
+import { TrackingActions } from '~/packages/details/constants';
+import { npmInstallationCommand, npmSetupCommand } from '~/packages/details/store/getters';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('NpmInstallation', () => {
+ let wrapper;
+
+ const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
+
+ function createComponent() {
+ const store = new Vuex.Store({
+ state: {
+ packageEntity,
+ nugetPath,
+ },
+ getters: {
+ npmInstallationCommand,
+ npmSetupCommand,
+ },
+ });
+
+ wrapper = shallowMount(NpmInstallation, {
+ localVue,
+ store,
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct npm command', () => {
+ expect(
+ findCodeInstructions()
+ .at(0)
+ .props(),
+ ).toMatchObject({
+ instruction: 'npm i @Test/package',
+ multiline: false,
+ trackingAction: TrackingActions.COPY_NPM_INSTALL_COMMAND,
+ });
+ });
+
+ it('renders the correct yarn command', () => {
+ expect(
+ findCodeInstructions()
+ .at(1)
+ .props(),
+ ).toMatchObject({
+ instruction: 'yarn add @Test/package',
+ multiline: false,
+ trackingAction: TrackingActions.COPY_YARN_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct npm command', () => {
+ expect(
+ findCodeInstructions()
+ .at(2)
+ .props(),
+ ).toMatchObject({
+ instruction: 'echo @Test:registry=undefined >> .npmrc',
+ multiline: false,
+ trackingAction: TrackingActions.COPY_NPM_SETUP_COMMAND,
+ });
+ });
+
+ it('renders the correct yarn command', () => {
+ expect(
+ findCodeInstructions()
+ .at(3)
+ .props(),
+ ).toMatchObject({
+ instruction: 'echo \\"@Test:registry\\" \\"undefined\\" >> .yarnrc',
+ multiline: false,
+ trackingAction: TrackingActions.COPY_YARN_SETUP_COMMAND,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/nuget_installation_spec.js b/spec/frontend/packages/details/components/nuget_installation_spec.js
new file mode 100644
index 00000000000..a23bf9a18a1
--- /dev/null
+++ b/spec/frontend/packages/details/components/nuget_installation_spec.js
@@ -0,0 +1,75 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nugetPackage as packageEntity } from 'jest/packages/mock_data';
+import { registryUrl as nugetPath } from 'jest/packages/details/mock_data';
+import NugetInstallation from '~/packages/details/components/nuget_installation.vue';
+import CodeInstructions from '~/packages/details/components/code_instruction.vue';
+import { TrackingActions } from '~/packages/details/constants';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('NugetInstallation', () => {
+ let wrapper;
+
+ const nugetInstallationCommandStr = 'foo/command';
+ const nugetSetupCommandStr = 'foo/setup';
+
+ const store = new Vuex.Store({
+ state: {
+ packageEntity,
+ nugetPath,
+ },
+ getters: {
+ nugetInstallationCommand: () => nugetInstallationCommandStr,
+ nugetSetupCommand: () => nugetSetupCommandStr,
+ },
+ });
+
+ const findCodeInstructions = () => wrapper.findAll(CodeInstructions);
+
+ function createComponent() {
+ wrapper = shallowMount(NugetInstallation, {
+ localVue,
+ store,
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ if (wrapper) wrapper.destroy();
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct command', () => {
+ expect(
+ findCodeInstructions()
+ .at(0)
+ .props(),
+ ).toMatchObject({
+ instruction: nugetInstallationCommandStr,
+ trackingAction: TrackingActions.COPY_NUGET_INSTALL_COMMAND,
+ });
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct command', () => {
+ expect(
+ findCodeInstructions()
+ .at(1)
+ .props(),
+ ).toMatchObject({
+ instruction: nugetSetupCommandStr,
+ trackingAction: TrackingActions.COPY_NUGET_SETUP_COMMAND,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/package_history_spec.js b/spec/frontend/packages/details/components/package_history_spec.js
new file mode 100644
index 00000000000..e293e119585
--- /dev/null
+++ b/spec/frontend/packages/details/components/package_history_spec.js
@@ -0,0 +1,106 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+import component from '~/packages/details/components/package_history.vue';
+
+import { mavenPackage, mockPipelineInfo } from '../../mock_data';
+
+describe('Package History', () => {
+ let wrapper;
+ const defaultProps = {
+ projectName: 'baz project',
+ packageEntity: { ...mavenPackage },
+ };
+
+ const mountComponent = props => {
+ wrapper = shallowMount(component, {
+ propsData: { ...defaultProps, ...props },
+ stubs: {
+ HistoryElement: '<div data-testid="history-element"><slot></slot></div>',
+ GlSprintf,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findHistoryElement = testId => wrapper.find(`[data-testid="${testId}"]`);
+ const findElementLink = container => container.find(GlLink);
+ const findElementTimeAgo = container => container.find(TimeAgoTooltip);
+ const findTitle = () => wrapper.find('[data-testid="title"]');
+ const findTimeline = () => wrapper.find('[data-testid="timeline"]');
+
+ it('has the correct title', () => {
+ mountComponent();
+
+ const title = findTitle();
+
+ expect(title.exists()).toBe(true);
+ expect(title.text()).toBe('History');
+ });
+
+ it('has a timeline container', () => {
+ mountComponent();
+
+ const title = findTimeline();
+
+ expect(title.exists()).toBe(true);
+ expect(title.classes()).toEqual(
+ expect.arrayContaining(['timeline', 'main-notes-list', 'notes']),
+ );
+ });
+
+ describe.each`
+ name | icon | text | timeAgoTooltip | link
+ ${'created-on'} | ${'clock'} | ${'Test package version 1.0.0 was created'} | ${mavenPackage.created_at} | ${null}
+ ${'updated-at'} | ${'pencil'} | ${'Test package version 1.0.0 was updated'} | ${mavenPackage.updated_at} | ${null}
+ ${'commit'} | ${'commit'} | ${'Commit sha-baz on branch branch-name'} | ${null} | ${mockPipelineInfo.project.commit_url}
+ ${'pipeline'} | ${'pipeline'} | ${'Pipeline #1 triggered by foo'} | ${mockPipelineInfo.created_at} | ${mockPipelineInfo.project.pipeline_url}
+ ${'published'} | ${'package'} | ${'Published to the baz project Package Registry'} | ${mavenPackage.created_at} | ${null}
+ `('history element $name', ({ name, icon, text, timeAgoTooltip, link }) => {
+ let element;
+
+ beforeEach(() => {
+ mountComponent({ packageEntity: { ...mavenPackage, pipeline: mockPipelineInfo } });
+ element = findHistoryElement(name);
+ });
+
+ it('has the correct icon', () => {
+ expect(element.props('icon')).toBe(icon);
+ });
+
+ it('has the correct text', () => {
+ expect(element.text()).toBe(text);
+ });
+
+ it('time-ago tooltip', () => {
+ const timeAgo = findElementTimeAgo(element);
+ const exist = Boolean(timeAgoTooltip);
+
+ expect(timeAgo.exists()).toBe(exist);
+ if (exist) {
+ expect(timeAgo.props('time')).toBe(timeAgoTooltip);
+ }
+ });
+
+ it('link', () => {
+ const linkElement = findElementLink(element);
+ const exist = Boolean(link);
+
+ expect(linkElement.exists()).toBe(exist);
+ if (exist) {
+ expect(linkElement.attributes('href')).toBe(link);
+ }
+ });
+ });
+
+ describe('when pipelineInfo is missing', () => {
+ it.each(['commit', 'pipeline'])('%s history element is hidden', name => {
+ mountComponent();
+ expect(findHistoryElement(name).exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/package_title_spec.js b/spec/frontend/packages/details/components/package_title_spec.js
new file mode 100644
index 00000000000..a30dc4b8aba
--- /dev/null
+++ b/spec/frontend/packages/details/components/package_title_spec.js
@@ -0,0 +1,168 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import PackageTitle from '~/packages/details/components/package_title.vue';
+import PackageTags from '~/packages/shared/components/package_tags.vue';
+import {
+ conanPackage,
+ mavenFiles,
+ mavenPackage,
+ mockTags,
+ npmFiles,
+ npmPackage,
+ nugetPackage,
+} from '../../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('PackageTitle', () => {
+ let wrapper;
+ let store;
+
+ function createComponent({
+ packageEntity = mavenPackage,
+ packageFiles = mavenFiles,
+ icon = null,
+ } = {}) {
+ store = new Vuex.Store({
+ state: {
+ packageEntity,
+ packageFiles,
+ },
+ getters: {
+ packageTypeDisplay: ({ packageEntity: { package_type: type } }) => type,
+ packagePipeline: ({ packageEntity: { pipeline = null } }) => pipeline,
+ packageIcon: () => icon,
+ },
+ });
+
+ wrapper = shallowMount(PackageTitle, {
+ localVue,
+ store,
+ });
+ }
+
+ const packageIcon = () => wrapper.find('[data-testid="package-icon"]');
+ const packageType = () => wrapper.find('[data-testid="package-type"]');
+ const packageSize = () => wrapper.find('[data-testid="package-size"]');
+ const pipelineProject = () => wrapper.find('[data-testid="pipeline-project"]');
+ const packageRef = () => wrapper.find('[data-testid="package-ref"]');
+ const packageTags = () => wrapper.find(PackageTags);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('renders', () => {
+ it('without tags', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('with tags', () => {
+ createComponent({ packageEntity: { ...mavenPackage, tags: mockTags } });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ describe('package icon', () => {
+ const fakeSrc = 'a-fake-src';
+
+ it('shows an icon when provided one from vuex', () => {
+ createComponent({ icon: fakeSrc });
+
+ expect(packageIcon().exists()).toBe(true);
+ });
+
+ it('has the correct src attribute', () => {
+ createComponent({ icon: fakeSrc });
+
+ expect(packageIcon().props('src')).toBe(fakeSrc);
+ });
+
+ it('does not show an icon when not provided one', () => {
+ createComponent();
+
+ expect(packageIcon().exists()).toBe(false);
+ });
+ });
+
+ describe.each`
+ packageEntity | expectedResult
+ ${conanPackage} | ${'conan'}
+ ${mavenPackage} | ${'maven'}
+ ${npmPackage} | ${'npm'}
+ ${nugetPackage} | ${'nuget'}
+ `(`package type`, ({ packageEntity, expectedResult }) => {
+ beforeEach(() => createComponent({ packageEntity }));
+
+    it(`${packageEntity.package_type} should render ${expectedResult} from Vuex getters`, () => {
+ expect(packageType().text()).toBe(expectedResult);
+ });
+ });
+
+ describe('calculates the package size', () => {
+    it('correctly calculates when there is only 1 file', () => {
+ createComponent({ packageEntity: npmPackage, packageFiles: npmFiles });
+
+ expect(packageSize().text()).toBe('200 bytes');
+ });
+
+    it('correctly calculates when there are multiple files', () => {
+ createComponent();
+
+ expect(packageSize().text()).toBe('300 bytes');
+ });
+ });
+
+ describe('package tags', () => {
+ it('displays the package-tags component when the package has tags', () => {
+ createComponent({
+ packageEntity: {
+ ...npmPackage,
+ tags: mockTags,
+ },
+ });
+
+ expect(packageTags().exists()).toBe(true);
+ });
+
+ it('does not display the package-tags component when there are no tags', () => {
+ createComponent();
+
+ expect(packageTags().exists()).toBe(false);
+ });
+ });
+
+ describe('package ref', () => {
+ it('does not display the ref if missing', () => {
+ createComponent();
+
+ expect(packageRef().exists()).toBe(false);
+ });
+
+ it('correctly shows the package ref if there is one', () => {
+ createComponent({ packageEntity: npmPackage });
+
+ expect(packageRef().contains('gl-icon-stub')).toBe(true);
+ expect(packageRef().text()).toBe(npmPackage.pipeline.ref);
+ });
+ });
+
+ describe('pipeline project', () => {
+ it('does not display the project if missing', () => {
+ createComponent();
+
+ expect(pipelineProject().exists()).toBe(false);
+ });
+
+ it('correctly shows the pipeline project if there is one', () => {
+ createComponent({ packageEntity: npmPackage });
+
+ expect(pipelineProject().text()).toBe(npmPackage.pipeline.project.name);
+ expect(pipelineProject().attributes('href')).toBe(npmPackage.pipeline.project.web_url);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/components/pypi_installation_spec.js b/spec/frontend/packages/details/components/pypi_installation_spec.js
new file mode 100644
index 00000000000..da30b4ba565
--- /dev/null
+++ b/spec/frontend/packages/details/components/pypi_installation_spec.js
@@ -0,0 +1,60 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { pypiPackage as packageEntity } from 'jest/packages/mock_data';
+import PypiInstallation from '~/packages/details/components/pypi_installation.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('PypiInstallation', () => {
+ let wrapper;
+
+ const pipCommandStr = 'pip install';
+ const pypiSetupStr = 'python setup';
+
+ const store = new Vuex.Store({
+ state: {
+ packageEntity,
+ pypiHelpPath: 'foo',
+ },
+ getters: {
+ pypiPipCommand: () => pipCommandStr,
+ pypiSetupCommand: () => pypiSetupStr,
+ },
+ });
+
+ const pipCommand = () => wrapper.find('[data-testid="pip-command"]');
+ const setupInstruction = () => wrapper.find('[data-testid="pypi-setup-content"]');
+
+ function createComponent() {
+ wrapper = shallowMount(PypiInstallation, {
+ localVue,
+ store,
+ });
+ }
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders all the messages', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('installation commands', () => {
+ it('renders the correct pip command', () => {
+ expect(pipCommand().props('instruction')).toBe(pipCommandStr);
+ });
+ });
+
+ describe('setup commands', () => {
+ it('renders the correct setup block', () => {
+ expect(setupInstruction().props('instruction')).toBe(pypiSetupStr);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/mock_data.js b/spec/frontend/packages/details/mock_data.js
new file mode 100644
index 00000000000..d43abcedb2e
--- /dev/null
+++ b/spec/frontend/packages/details/mock_data.js
@@ -0,0 +1,47 @@
+export const registryUrl = 'foo/registry';
+
+export const mavenMetadata = {
+ app_group: 'com.test.package.app',
+ app_name: 'test-package-app',
+ app_version: '1.0.0',
+};
+
+export const generateMavenCommand = ({
+ app_group: appGroup = '',
+ app_name: appName = '',
+ app_version: appVersion = '',
+}) => `mvn dependency:get -Dartifact=${appGroup}:${appName}:${appVersion}`;
+
+export const generateXmlCodeBlock = ({
+ app_group: appGroup = '',
+ app_name: appName = '',
+ app_version: appVersion = '',
+}) => `<dependency>
+ <groupId>${appGroup}</groupId>
+ <artifactId>${appName}</artifactId>
+ <version>${appVersion}</version>
+</dependency>`;
+
+export const generateMavenSetupXml = () => `<repositories>
+ <repository>
+ <id>gitlab-maven</id>
+ <url>${registryUrl}</url>
+ </repository>
+</repositories>
+
+<distributionManagement>
+ <repository>
+ <id>gitlab-maven</id>
+ <url>${registryUrl}</url>
+ </repository>
+
+ <snapshotRepository>
+ <id>gitlab-maven</id>
+ <url>${registryUrl}</url>
+ </snapshotRepository>
+</distributionManagement>`;
+
+export const pypiSetupCommandStr = `[gitlab]
+repository = foo
+username = __token__
+password = <your personal access token>`;
diff --git a/spec/frontend/packages/details/store/actions_spec.js b/spec/frontend/packages/details/store/actions_spec.js
new file mode 100644
index 00000000000..6dfb2b63f85
--- /dev/null
+++ b/spec/frontend/packages/details/store/actions_spec.js
@@ -0,0 +1,76 @@
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import fetchPackageVersions from '~/packages/details/store/actions';
+import * as types from '~/packages/details/store/mutation_types';
+import { FETCH_PACKAGE_VERSIONS_ERROR } from '~/packages/details/constants';
+import { npmPackage as packageEntity } from '../../mock_data';
+
+jest.mock('~/flash.js');
+jest.mock('~/api.js');
+
+describe('Actions Package details store', () => {
+ describe('fetchPackageVersions', () => {
+ it('should fetch the package versions', done => {
+ Api.projectPackage = jest.fn().mockResolvedValue({ data: packageEntity });
+
+ testAction(
+ fetchPackageVersions,
+ undefined,
+ { packageEntity },
+ [
+ { type: types.SET_LOADING, payload: true },
+ { type: types.SET_PACKAGE_VERSIONS, payload: packageEntity.versions },
+ { type: types.SET_LOADING, payload: false },
+ ],
+ [],
+ () => {
+ expect(Api.projectPackage).toHaveBeenCalledWith(
+ packageEntity.project_id,
+ packageEntity.id,
+ );
+ done();
+ },
+ );
+ });
+
+ it("does not set the versions if they don't exist", done => {
+ Api.projectPackage = jest.fn().mockResolvedValue({ data: { packageEntity, versions: null } });
+
+ testAction(
+ fetchPackageVersions,
+ undefined,
+ { packageEntity },
+ [{ type: types.SET_LOADING, payload: true }, { type: types.SET_LOADING, payload: false }],
+ [],
+ () => {
+ expect(Api.projectPackage).toHaveBeenCalledWith(
+ packageEntity.project_id,
+ packageEntity.id,
+ );
+ done();
+ },
+ );
+ });
+
+ it('should create flash on API error', done => {
+ Api.projectPackage = jest.fn().mockRejectedValue();
+
+ testAction(
+ fetchPackageVersions,
+ undefined,
+ { packageEntity },
+ [{ type: types.SET_LOADING, payload: true }, { type: types.SET_LOADING, payload: false }],
+ [],
+ () => {
+ expect(Api.projectPackage).toHaveBeenCalledWith(
+ packageEntity.project_id,
+ packageEntity.id,
+ );
+ expect(createFlash).toHaveBeenCalledWith(FETCH_PACKAGE_VERSIONS_ERROR);
+ done();
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
new file mode 100644
index 00000000000..307976d4124
--- /dev/null
+++ b/spec/frontend/packages/details/store/getters_spec.js
@@ -0,0 +1,237 @@
+import {
+ conanInstallationCommand,
+ conanSetupCommand,
+ packagePipeline,
+ packageTypeDisplay,
+ packageIcon,
+ mavenInstallationXml,
+ mavenInstallationCommand,
+ mavenSetupXml,
+ npmInstallationCommand,
+ npmSetupCommand,
+ nugetInstallationCommand,
+ nugetSetupCommand,
+ pypiPipCommand,
+ pypiSetupCommand,
+ composerRegistryInclude,
+ composerPackageInclude,
+} from '~/packages/details/store/getters';
+import {
+ conanPackage,
+ npmPackage,
+ nugetPackage,
+ mockPipelineInfo,
+ mavenPackage as packageWithoutBuildInfo,
+ pypiPackage,
+} from '../../mock_data';
+import {
+ generateMavenCommand,
+ generateXmlCodeBlock,
+ generateMavenSetupXml,
+ registryUrl,
+ pypiSetupCommandStr,
+} from '../mock_data';
+import { generateConanRecipe } from '~/packages/details/utils';
+import { NpmManager } from '~/packages/details/constants';
+
+describe('Getters PackageDetails Store', () => {
+ let state;
+
+ const defaultState = {
+ packageEntity: packageWithoutBuildInfo,
+ conanPath: registryUrl,
+ mavenPath: registryUrl,
+ npmPath: registryUrl,
+ nugetPath: registryUrl,
+ pypiPath: registryUrl,
+ };
+
+ const setupState = (testState = {}) => {
+ state = {
+ ...defaultState,
+ ...testState,
+ };
+ };
+
+ const recipe = generateConanRecipe(conanPackage);
+ const conanInstallationCommandStr = `conan install ${recipe} --remote=gitlab`;
+ const conanSetupCommandStr = `conan remote add gitlab ${registryUrl}`;
+
+ const mavenCommandStr = generateMavenCommand(packageWithoutBuildInfo.maven_metadatum);
+ const mavenInstallationXmlBlock = generateXmlCodeBlock(packageWithoutBuildInfo.maven_metadatum);
+ const mavenSetupXmlBlock = generateMavenSetupXml();
+
+ const npmInstallStr = `npm i ${npmPackage.name}`;
+ const npmSetupStr = `echo @Test:registry=${registryUrl} >> .npmrc`;
+ const yarnInstallStr = `yarn add ${npmPackage.name}`;
+ const yarnSetupStr = `echo \\"@Test:registry\\" \\"${registryUrl}\\" >> .yarnrc`;
+
+ const nugetInstallationCommandStr = `nuget install ${nugetPackage.name} -Source "GitLab"`;
+ const nugetSetupCommandStr = `nuget source Add -Name "GitLab" -Source "${registryUrl}" -UserName <your_username> -Password <your_token>`;
+
+ const pypiPipCommandStr = `pip install ${pypiPackage.name} --index-url ${registryUrl}`;
+ const composerRegistryIncludeStr = '{"type":"composer","url":"foo"}';
+ const composerPackageIncludeStr = JSON.stringify({
+ [packageWithoutBuildInfo.name]: packageWithoutBuildInfo.version,
+ });
+
+ describe('packagePipeline', () => {
+ it('should return the pipeline info when pipeline exists', () => {
+ setupState({
+ packageEntity: {
+ ...npmPackage,
+ pipeline: mockPipelineInfo,
+ },
+ });
+
+ expect(packagePipeline(state)).toEqual(mockPipelineInfo);
+ });
+
+ it('should return null when build_info does not exist', () => {
+ setupState();
+
+ expect(packagePipeline(state)).toBe(null);
+ });
+ });
+
+ describe('packageTypeDisplay', () => {
+ describe.each`
+ packageEntity | expectedResult
+ ${conanPackage} | ${'Conan'}
+ ${packageWithoutBuildInfo} | ${'Maven'}
+ ${npmPackage} | ${'NPM'}
+ ${nugetPackage} | ${'NuGet'}
+ ${pypiPackage} | ${'PyPi'}
+ `(`package type`, ({ packageEntity, expectedResult }) => {
+ beforeEach(() => setupState({ packageEntity }));
+
+ it(`${packageEntity.package_type} should show as ${expectedResult}`, () => {
+ expect(packageTypeDisplay(state)).toBe(expectedResult);
+ });
+ });
+ });
+
+ describe('packageIcon', () => {
+ describe('nuget packages', () => {
+ it('should return nuget package icon', () => {
+ setupState({ packageEntity: nugetPackage });
+
+ expect(packageIcon(state)).toBe(nugetPackage.nuget_metadatum.icon_url);
+ });
+
+ it('should return null when nuget package does not have an icon', () => {
+ setupState({ packageEntity: { ...nugetPackage, nuget_metadatum: {} } });
+
+ expect(packageIcon(state)).toBe(null);
+ });
+ });
+
+ it('should not find icons for other package types', () => {
+ setupState({ packageEntity: npmPackage });
+
+ expect(packageIcon(state)).toBe(null);
+ });
+ });
+
+ describe('conan string getters', () => {
+ it('gets the correct conanInstallationCommand', () => {
+ setupState({ packageEntity: conanPackage });
+
+ expect(conanInstallationCommand(state)).toBe(conanInstallationCommandStr);
+ });
+
+ it('gets the correct conanSetupCommand', () => {
+ setupState({ packageEntity: conanPackage });
+
+ expect(conanSetupCommand(state)).toBe(conanSetupCommandStr);
+ });
+ });
+
+ describe('maven string getters', () => {
+ it('gets the correct mavenInstallationXml', () => {
+ setupState();
+
+ expect(mavenInstallationXml(state)).toBe(mavenInstallationXmlBlock);
+ });
+
+ it('gets the correct mavenInstallationCommand', () => {
+ setupState();
+
+ expect(mavenInstallationCommand(state)).toBe(mavenCommandStr);
+ });
+
+ it('gets the correct mavenSetupXml', () => {
+ setupState();
+
+ expect(mavenSetupXml(state)).toBe(mavenSetupXmlBlock);
+ });
+ });
+
+ describe('npm string getters', () => {
+ it('gets the correct npmInstallationCommand for NPM', () => {
+ setupState({ packageEntity: npmPackage });
+
+ expect(npmInstallationCommand(state)(NpmManager.NPM)).toBe(npmInstallStr);
+ });
+
+ it('gets the correct npmSetupCommand for NPM', () => {
+ setupState({ packageEntity: npmPackage });
+
+ expect(npmSetupCommand(state)(NpmManager.NPM)).toBe(npmSetupStr);
+ });
+
+ it('gets the correct npmInstallationCommand for Yarn', () => {
+ setupState({ packageEntity: npmPackage });
+
+ expect(npmInstallationCommand(state)(NpmManager.YARN)).toBe(yarnInstallStr);
+ });
+
+ it('gets the correct npmSetupCommand for Yarn', () => {
+ setupState({ packageEntity: npmPackage });
+
+ expect(npmSetupCommand(state)(NpmManager.YARN)).toBe(yarnSetupStr);
+ });
+ });
+
+ describe('nuget string getters', () => {
+ it('gets the correct nugetInstallationCommand', () => {
+ setupState({ packageEntity: nugetPackage });
+
+ expect(nugetInstallationCommand(state)).toBe(nugetInstallationCommandStr);
+ });
+
+ it('gets the correct nugetSetupCommand', () => {
+ setupState({ packageEntity: nugetPackage });
+
+ expect(nugetSetupCommand(state)).toBe(nugetSetupCommandStr);
+ });
+ });
+
+ describe('pypi string getters', () => {
+ it('gets the correct pypiPipCommand', () => {
+ setupState({ packageEntity: pypiPackage });
+
+ expect(pypiPipCommand(state)).toBe(pypiPipCommandStr);
+ });
+
+ it('gets the correct pypiSetupCommand', () => {
+ setupState({ pypiSetupPath: 'foo' });
+
+ expect(pypiSetupCommand(state)).toBe(pypiSetupCommandStr);
+ });
+ });
+
+ describe('composer string getters', () => {
+ it('gets the correct composerRegistryInclude command', () => {
+ setupState({ composerPath: 'foo' });
+
+ expect(composerRegistryInclude(state)).toBe(composerRegistryIncludeStr);
+ });
+
+ it('gets the correct composerPackageInclude command', () => {
+ setupState();
+
+ expect(composerPackageInclude(state)).toBe(composerPackageIncludeStr);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/store/mutations_spec.js b/spec/frontend/packages/details/store/mutations_spec.js
new file mode 100644
index 00000000000..501a56dcdde
--- /dev/null
+++ b/spec/frontend/packages/details/store/mutations_spec.js
@@ -0,0 +1,31 @@
+import mutations from '~/packages/details/store/mutations';
+import * as types from '~/packages/details/store/mutation_types';
+import { npmPackage as packageEntity } from '../../mock_data';
+
+describe('Mutations package details Store', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = {
+ packageEntity,
+ };
+ });
+
+ describe('SET_LOADING', () => {
+ it('should set loading', () => {
+ mutations[types.SET_LOADING](mockState, true);
+
+ expect(mockState.isLoading).toEqual(true);
+ });
+ });
+
+ describe('SET_PACKAGE_VERSIONS', () => {
+ it('should set the package entity versions', () => {
+ const fakeVersions = [1, 2, 3];
+
+ mutations[types.SET_PACKAGE_VERSIONS](mockState, fakeVersions);
+
+ expect(mockState.packageEntity.versions).toEqual(fakeVersions);
+ });
+ });
+});
diff --git a/spec/frontend/packages/details/utils_spec.js b/spec/frontend/packages/details/utils_spec.js
new file mode 100644
index 00000000000..087888016ee
--- /dev/null
+++ b/spec/frontend/packages/details/utils_spec.js
@@ -0,0 +1,24 @@
+import { generateConanRecipe } from '~/packages/details/utils';
+import { conanPackage } from '../mock_data';
+
+describe('Package detail utils', () => {
+ describe('generateConanRecipe', () => {
+ it('correctly generates the conan recipe', () => {
+ const recipe = generateConanRecipe(conanPackage);
+
+ expect(recipe).toEqual(conanPackage.recipe);
+ });
+
+ it('returns an empty recipe when no information is supplied', () => {
+ const recipe = generateConanRecipe({});
+
+ expect(recipe).toEqual('/@/');
+ });
+
+    it('returns empty strings in the recipe when metadata is missing', () => {
+ const recipe = generateConanRecipe({ name: 'foo', version: '0.0.1' });
+
+ expect(recipe).toBe('foo/0.0.1@/');
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/coming_soon/helpers_spec.js b/spec/frontend/packages/list/coming_soon/helpers_spec.js
new file mode 100644
index 00000000000..4a996bfad76
--- /dev/null
+++ b/spec/frontend/packages/list/coming_soon/helpers_spec.js
@@ -0,0 +1,36 @@
+import * as comingSoon from '~/packages/list/coming_soon/helpers';
+import { fakeIssues, asGraphQLResponse, asViewModel } from './mock_data';
+
+jest.mock('~/api.js');
+
+describe('Coming Soon Helpers', () => {
+ const [noLabels, acceptingMergeRequestLabel, workflowLabel] = fakeIssues;
+
+ describe('toViewModel', () => {
+ it('formats a GraphQL response correctly', () => {
+ expect(comingSoon.toViewModel(asGraphQLResponse)).toEqual(asViewModel);
+ });
+ });
+
+ describe('findWorkflowLabel', () => {
+ it('finds a workflow label', () => {
+ expect(comingSoon.findWorkflowLabel(workflowLabel.labels)).toEqual(workflowLabel.labels[0]);
+ });
+
+ it("returns undefined when there isn't one", () => {
+ expect(comingSoon.findWorkflowLabel(noLabels.labels)).toBeUndefined();
+ });
+ });
+
+ describe('findAcceptingContributionsLabel', () => {
+ it('finds the correct label when it exists', () => {
+ expect(comingSoon.findAcceptingContributionsLabel(acceptingMergeRequestLabel.labels)).toEqual(
+ acceptingMergeRequestLabel.labels[0],
+ );
+ });
+
+ it("returns undefined when there isn't one", () => {
+ expect(comingSoon.findAcceptingContributionsLabel(noLabels.labels)).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/coming_soon/mock_data.js b/spec/frontend/packages/list/coming_soon/mock_data.js
new file mode 100644
index 00000000000..bb4568e4bd5
--- /dev/null
+++ b/spec/frontend/packages/list/coming_soon/mock_data.js
@@ -0,0 +1,90 @@
+export const fakeIssues = [
+ {
+ id: 1,
+ iid: 1,
+ title: 'issue one',
+ webUrl: 'foo',
+ },
+ {
+ id: 2,
+ iid: 2,
+ title: 'issue two',
+ labels: [{ title: 'Accepting merge requests', color: '#69d100' }],
+ milestone: {
+ title: '12.10',
+ },
+ webUrl: 'foo',
+ },
+ {
+ id: 3,
+ iid: 3,
+ title: 'issue three',
+ labels: [{ title: 'workflow::In dev', color: '#428bca' }],
+ webUrl: 'foo',
+ },
+ {
+ id: 4,
+ iid: 4,
+ title: 'issue four',
+ labels: [
+ { title: 'Accepting merge requests', color: '#69d100' },
+ { title: 'workflow::In dev', color: '#428bca' },
+ ],
+ webUrl: 'foo',
+ },
+];
+
+export const asGraphQLResponse = {
+ project: {
+ issues: {
+ nodes: fakeIssues.map(x => ({
+ ...x,
+ labels: {
+ nodes: x.labels,
+ },
+ })),
+ },
+ },
+};
+
+export const asViewModel = [
+ {
+ ...fakeIssues[0],
+ labels: [],
+ },
+ {
+ ...fakeIssues[1],
+ labels: [
+ {
+ title: 'Accepting merge requests',
+ color: '#69d100',
+ scoped: false,
+ },
+ ],
+ },
+ {
+ ...fakeIssues[2],
+ labels: [
+ {
+ title: 'workflow::In dev',
+ color: '#428bca',
+ scoped: true,
+ },
+ ],
+ },
+ {
+ ...fakeIssues[3],
+ labels: [
+ {
+ title: 'workflow::In dev',
+ color: '#428bca',
+ scoped: true,
+ },
+ {
+ title: 'Accepting merge requests',
+ color: '#69d100',
+ scoped: false,
+ },
+ ],
+ },
+];
diff --git a/spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js b/spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js
new file mode 100644
index 00000000000..c4cdadc45e6
--- /dev/null
+++ b/spec/frontend/packages/list/coming_soon/packages_coming_soon_spec.js
@@ -0,0 +1,138 @@
+import { GlEmptyState, GlSkeletonLoader, GlLabel } from '@gitlab/ui';
+import { mount, createLocalVue } from '@vue/test-utils';
+import VueApollo, { ApolloQuery } from 'vue-apollo';
+import ComingSoon from '~/packages/list/coming_soon/packages_coming_soon.vue';
+import { TrackingActions } from '~/packages/shared/constants';
+import { asViewModel } from './mock_data';
+import Tracking from '~/tracking';
+
+jest.mock('~/packages/list/coming_soon/helpers.js');
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('packages_coming_soon', () => {
+ let wrapper;
+
+ const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
+ const findAllIssues = () => wrapper.findAll('[data-testid="issue-row"]');
+ const findIssuesData = () =>
+ findAllIssues().wrappers.map(x => {
+ const titleLink = x.find('[data-testid="issue-title-link"]');
+ const milestone = x.find('[data-testid="milestone"]');
+ const issueIdLink = x.find('[data-testid="issue-id-link"]');
+ const labels = x.findAll(GlLabel);
+
+ const issueId = Number(issueIdLink.text().substr(1));
+
+ return {
+ id: issueId,
+ iid: issueId,
+ title: titleLink.text(),
+ webUrl: titleLink.attributes('href'),
+ labels: labels.wrappers.map(label => ({
+ color: label.props('backgroundColor'),
+ title: label.props('title'),
+ scoped: label.props('scoped'),
+ })),
+ ...(milestone.exists() ? { milestone: { title: milestone.text() } } : {}),
+ };
+ });
+ const findIssueTitleLink = () => wrapper.find('[data-testid="issue-title-link"]');
+ const findIssueIdLink = () => wrapper.find('[data-testid="issue-id-link"]');
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+
+ const mountComponent = (testParams = {}) => {
+ const $apolloData = {
+ loading: testParams.isLoading || false,
+ };
+
+ wrapper = mount(ComingSoon, {
+ localVue,
+ propsData: {
+ illustration: 'foo',
+ projectPath: 'foo',
+ suggestedContributionsPath: 'foo',
+ },
+ stubs: {
+ ApolloQuery,
+ GlLink: true,
+ },
+ mocks: {
+ $apolloData,
+ },
+ });
+
+ // Mock the GraphQL query result
+ wrapper.find(ApolloQuery).setData({
+ result: {
+ data: testParams.issues || asViewModel,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when loading', () => {
+ beforeEach(() => mountComponent({ isLoading: true }));
+
+ it('renders the skeleton loader', () => {
+ expect(findSkeletonLoader().exists()).toBe(true);
+ });
+ });
+
+ describe('when there are no issues', () => {
+ beforeEach(() => mountComponent({ issues: [] }));
+
+ it('renders the empty state', () => {
+ expect(findEmptyState().exists()).toBe(true);
+ });
+ });
+
+ describe('when there are issues', () => {
+ beforeEach(() => mountComponent());
+
+ it('renders each issue', () => {
+ expect(findIssuesData()).toEqual(asViewModel);
+ });
+ });
+
+ describe('tracking', () => {
+ const firstIssue = asViewModel[0];
+ let eventSpy;
+
+ beforeEach(() => {
+ eventSpy = jest.spyOn(Tracking, 'event');
+ mountComponent();
+ });
+
+ it('tracks when mounted', () => {
+ expect(eventSpy).toHaveBeenCalledWith(undefined, TrackingActions.COMING_SOON_REQUESTED, {});
+ });
+
+ it('tracks when an issue title link is clicked', () => {
+ eventSpy.mockClear();
+
+ findIssueTitleLink().vm.$emit('click');
+
+ expect(eventSpy).toHaveBeenCalledWith(undefined, TrackingActions.COMING_SOON_LIST, {
+ label: firstIssue.title,
+ value: firstIssue.iid,
+ });
+ });
+
+ it('tracks when an issue id link is clicked', () => {
+ eventSpy.mockClear();
+
+ findIssueIdLink().vm.$emit('click');
+
+ expect(eventSpy).toHaveBeenCalledWith(undefined, TrackingActions.COMING_SOON_LIST, {
+ label: firstIssue.title,
+ value: firstIssue.iid,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap
new file mode 100644
index 00000000000..ed77f25916f
--- /dev/null
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_filter_spec.js.snap
@@ -0,0 +1,14 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`packages_filter renders 1`] = `
+<gl-search-box-by-click-stub
+ clearable="true"
+ clearbuttontitle="Clear"
+ clearrecentsearchestext="Clear recent searches"
+ closebuttontitle="Close"
+ norecentsearchestext="You don't have any recent searches"
+ placeholder="Filter by name"
+ recentsearchesheader="Recent searches"
+ value=""
+/>
+`;
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
new file mode 100644
index 00000000000..2b7a4c83bed
--- /dev/null
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -0,0 +1,457 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`packages_list_app renders 1`] = `
+<b-tabs-stub
+ activenavitemclass="gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
+ class="gl-tabs"
+ contentclass=",gl-tab-content"
+ navclass="gl-tabs-nav"
+ nofade="true"
+ nonavstyle="true"
+ tag="div"
+>
+ <template>
+
+ <b-tab-stub
+ tag="div"
+ title="All"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Composer"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Composer packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no Composer packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Conan"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Conan packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no Conan packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="Maven"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no Maven packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no Maven packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="NPM"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no NPM packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no NPM packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="NuGet"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no NuGet packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no NuGet packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+ <b-tab-stub
+ tag="div"
+ title="PyPi"
+ titlelinkclass="gl-tab-nav-item"
+ >
+ <template>
+ <div>
+ <section
+ class="row empty-state text-center"
+ >
+ <div
+ class="col-12"
+ >
+ <div
+ class="svg-250 svg-content"
+ >
+ <img
+ alt="There are no PyPi packages yet"
+ class="gl-max-w-full"
+ src="helpSvg"
+ />
+ </div>
+ </div>
+
+ <div
+ class="col-12"
+ >
+ <div
+ class="text-content gl-mx-auto gl-my-0 gl-p-5"
+ >
+ <h1
+ class="h4"
+ >
+ There are no PyPi packages yet
+ </h1>
+
+ <p>
+ Learn how to
+ <b-link-stub
+ class="gl-link"
+ event="click"
+ href="helpUrl"
+ routertag="a"
+ target="_blank"
+ >
+ publish and share your packages
+ </b-link-stub>
+ with GitLab.
+ </p>
+
+ <div>
+ <!---->
+
+ <!---->
+ </div>
+ </div>
+ </div>
+ </section>
+ </div>
+ </template>
+ </b-tab-stub>
+
+ <!---->
+ </template>
+ <template>
+ <div
+ class="d-flex align-self-center ml-md-auto py-1 py-md-0"
+ >
+ <package-filter-stub
+ class="mr-1"
+ />
+
+ <package-sort-stub />
+ </div>
+ </template>
+</b-tabs-stub>
+`;
diff --git a/spec/frontend/packages/list/components/packages_filter_spec.js b/spec/frontend/packages/list/components/packages_filter_spec.js
new file mode 100644
index 00000000000..b186b5f5e48
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_filter_spec.js
@@ -0,0 +1,50 @@
+import Vuex from 'vuex';
+import { GlSearchBoxByClick } from '@gitlab/ui';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import PackagesFilter from '~/packages/list/components/packages_filter.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('packages_filter', () => {
+ let wrapper;
+ let store;
+
+ const findGlSearchBox = () => wrapper.find(GlSearchBoxByClick);
+
+ const mountComponent = () => {
+ store = new Vuex.Store();
+ store.dispatch = jest.fn();
+
+ wrapper = shallowMount(PackagesFilter, {
+ localVue,
+ store,
+ });
+ };
+
+ beforeEach(mountComponent);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('emits events', () => {
+ it('sets the filter value in the store on input', () => {
+ const searchString = 'foo';
+ findGlSearchBox().vm.$emit('input', searchString);
+
+ expect(store.dispatch).toHaveBeenCalledWith('setFilter', searchString);
+ });
+
+ it('emits the filter event when search box is submitted', () => {
+ findGlSearchBox().vm.$emit('submit');
+
+ expect(wrapper.emitted('filter')).toBeTruthy();
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js
new file mode 100644
index 00000000000..31bab3886c1
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_list_app_spec.js
@@ -0,0 +1,148 @@
+import Vuex from 'vuex';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { GlEmptyState, GlTab, GlTabs, GlSprintf, GlLink } from '@gitlab/ui';
+import PackageListApp from '~/packages/list/components/packages_list_app.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('packages_list_app', () => {
+ let wrapper;
+ let store;
+
+ const PackageList = {
+ name: 'package-list',
+ template: '<div><slot name="empty-state"></slot></div>',
+ };
+ const GlLoadingIcon = { name: 'gl-loading-icon', template: '<div>loading</div>' };
+
+ const emptyListHelpUrl = 'helpUrl';
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+ const findListComponent = () => wrapper.find(PackageList);
+ const findTabComponent = (index = 0) => wrapper.findAll(GlTab).at(index);
+
+ const createStore = (filterQuery = '') => {
+ store = new Vuex.Store({
+ state: {
+ isLoading: false,
+ config: {
+ resourceId: 'project_id',
+ emptyListIllustration: 'helpSvg',
+ emptyListHelpUrl,
+ },
+ filterQuery,
+ },
+ });
+ store.dispatch = jest.fn();
+ };
+
+ const mountComponent = () => {
+ wrapper = shallowMount(PackageListApp, {
+ localVue,
+ store,
+ stubs: {
+ GlEmptyState,
+ GlLoadingIcon,
+ PackageList,
+ GlTab,
+ GlTabs,
+ GlSprintf,
+ GlLink,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createStore();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ mountComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('empty state', () => {
+    it('generates the correct empty list link', () => {
+ mountComponent();
+
+ const link = findListComponent().find(GlLink);
+
+ expect(link.attributes('href')).toBe(emptyListHelpUrl);
+ expect(link.text()).toBe('publish and share your packages');
+ });
+
+ it('includes the right content on the default tab', () => {
+ mountComponent();
+
+ const heading = findEmptyState().find('h1');
+
+ expect(heading.text()).toBe('There are no packages yet');
+ });
+ });
+
+  it('calls requestPackagesList on page:changed', () => {
+ mountComponent();
+
+ const list = findListComponent();
+ list.vm.$emit('page:changed', 1);
+ expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList', { page: 1 });
+ });
+
+  it('calls requestDeletePackage on package:delete', () => {
+ mountComponent();
+
+ const list = findListComponent();
+ list.vm.$emit('package:delete', 'foo');
+ expect(store.dispatch).toHaveBeenCalledWith('requestDeletePackage', 'foo');
+ });
+
+ it('calls requestPackagesList on sort:changed', () => {
+ mountComponent();
+
+ const list = findListComponent();
+ list.vm.$emit('sort:changed');
+ expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
+ });
+
+  it('does not call requestPackagesList twice on render', () => {
+ mountComponent();
+
+ expect(store.dispatch).toHaveBeenCalledTimes(1);
+ });
+
+ describe('tab change', () => {
+ it('calls requestPackagesList when all tab is clicked', () => {
+ mountComponent();
+
+ findTabComponent().trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
+ });
+
+ it('calls requestPackagesList when a package type tab is clicked', () => {
+ mountComponent();
+
+ findTabComponent(1).trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith('requestPackagesList');
+ });
+ });
+
+ describe('filter without results', () => {
+ beforeEach(() => {
+ createStore('foo');
+ mountComponent();
+ });
+
+    it('should show a specific empty message', () => {
+ expect(findEmptyState().text()).toContain('Sorry, your filter produced no results');
+ expect(findEmptyState().text()).toContain(
+ 'To widen your search, change or remove the filters above',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/components/packages_list_spec.js b/spec/frontend/packages/list/components/packages_list_spec.js
new file mode 100644
index 00000000000..a90d5056212
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_list_spec.js
@@ -0,0 +1,219 @@
+import Vuex from 'vuex';
+import { last } from 'lodash';
+import { GlTable, GlPagination, GlModal } from '@gitlab/ui';
+import { mount, createLocalVue } from '@vue/test-utils';
+import stubChildren from 'helpers/stub_children';
+import Tracking from '~/tracking';
+import PackagesList from '~/packages/list/components/packages_list.vue';
+import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue';
+import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
+import * as SharedUtils from '~/packages/shared/utils';
+import { TrackingActions } from '~/packages/shared/constants';
+import { packageList } from '../../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('packages_list', () => {
+ let wrapper;
+ let store;
+
+ const GlSortingItem = { name: 'sorting-item-stub', template: '<div><slot></slot></div>' };
+ const EmptySlotStub = { name: 'empty-slot-stub', template: '<div>bar</div>' };
+
+ const findPackagesListLoader = () => wrapper.find(PackagesListLoader);
+ const findPackageListPagination = () => wrapper.find(GlPagination);
+ const findPackageListDeleteModal = () => wrapper.find(GlModal);
+ const findEmptySlot = () => wrapper.find({ name: 'empty-slot-stub' });
+ const findPackagesListRow = () => wrapper.find(PackagesListRow);
+
+ const createStore = (isGroupPage, packages, isLoading) => {
+ const state = {
+ isLoading,
+ packages,
+ pagination: {
+ perPage: 1,
+ total: 1,
+ page: 1,
+ },
+ config: {
+ isGroupPage,
+ },
+ sorting: {
+ orderBy: 'version',
+ sort: 'desc',
+ },
+ };
+ store = new Vuex.Store({
+ state,
+ getters: {
+ getList: () => packages,
+ },
+ });
+ store.dispatch = jest.fn();
+ };
+
+ const mountComponent = ({
+ isGroupPage = false,
+ packages = packageList,
+ isLoading = false,
+ ...options
+ } = {}) => {
+ createStore(isGroupPage, packages, isLoading);
+
+ wrapper = mount(PackagesList, {
+ localVue,
+ store,
+ stubs: {
+ ...stubChildren(PackagesList),
+ GlTable,
+ GlSortingItem,
+ GlModal,
+ },
+ ...options,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+  describe('when the list is loading', () => {
+ beforeEach(() => {
+ mountComponent({
+ packages: [],
+ isLoading: true,
+ });
+ });
+
+ it('shows skeleton loader when loading', () => {
+ expect(findPackagesListLoader().exists()).toBe(true);
+ });
+ });
+
+  describe('when the list is not loading', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('does not show skeleton loader when not loading', () => {
+ expect(findPackagesListLoader().exists()).toBe(false);
+ });
+ });
+
+ describe('layout', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it('contains a pagination component', () => {
+      const pagination = findPackageListPagination();
+      expect(pagination.exists()).toBe(true);
+ });
+
+ it('contains a modal component', () => {
+      const modal = findPackageListDeleteModal();
+      expect(modal.exists()).toBe(true);
+ });
+ });
+
+ describe('when the user can destroy the package', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+    it('setItemToBeDeleted sets itemToBeDeleted and opens the modal', () => {
+ const mockModalShow = jest.spyOn(wrapper.vm.$refs.packageListDeleteModal, 'show');
+ const item = last(wrapper.vm.list);
+
+ findPackagesListRow().vm.$emit('packageToDelete', item);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.itemToBeDeleted).toEqual(item);
+ expect(mockModalShow).toHaveBeenCalled();
+ });
+ });
+
+ it('deleteItemConfirmation resets itemToBeDeleted', () => {
+ wrapper.setData({ itemToBeDeleted: 1 });
+ wrapper.vm.deleteItemConfirmation();
+ expect(wrapper.vm.itemToBeDeleted).toEqual(null);
+ });
+
+    it('deleteItemConfirmation emits package:delete', () => {
+ const itemToBeDeleted = { id: 2 };
+ wrapper.setData({ itemToBeDeleted });
+ wrapper.vm.deleteItemConfirmation();
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.emitted('package:delete')[0]).toEqual([itemToBeDeleted]);
+ });
+ });
+
+ it('deleteItemCanceled resets itemToBeDeleted', () => {
+ wrapper.setData({ itemToBeDeleted: 1 });
+ wrapper.vm.deleteItemCanceled();
+ expect(wrapper.vm.itemToBeDeleted).toEqual(null);
+ });
+ });
+
+ describe('when the list is empty', () => {
+ beforeEach(() => {
+ mountComponent({
+ packages: [],
+ slots: {
+ 'empty-state': EmptySlotStub,
+ },
+ });
+ });
+
+    it('shows the empty slot', () => {
+ const emptySlot = findEmptySlot();
+ expect(emptySlot.exists()).toBe(true);
+ });
+ });
+
+ describe('pagination component', () => {
+ let pagination;
+ let modelEvent;
+
+ beforeEach(() => {
+ mountComponent();
+ pagination = findPackageListPagination();
+      // retrieve the event used by v-model; more robust than hardcoding it
+ modelEvent = pagination.vm.$options.model.event;
+ });
+
+ it('emits page:changed events when the page changes', () => {
+ pagination.vm.$emit(modelEvent, 2);
+ expect(wrapper.emitted('page:changed')).toEqual([[2]]);
+ });
+ });
+
+ describe('tracking', () => {
+ let eventSpy;
+ let utilSpy;
+ const category = 'foo';
+
+ beforeEach(() => {
+ mountComponent();
+ eventSpy = jest.spyOn(Tracking, 'event');
+ utilSpy = jest.spyOn(SharedUtils, 'packageTypeToTrackCategory').mockReturnValue(category);
+ wrapper.setData({ itemToBeDeleted: { package_type: 'conan' } });
+ });
+
+ it('tracking category calls packageTypeToTrackCategory', () => {
+ expect(wrapper.vm.tracking.category).toBe(category);
+ expect(utilSpy).toHaveBeenCalledWith('conan');
+ });
+
+    it('deleteItemConfirmation calls Tracking.event', () => {
+ wrapper.vm.deleteItemConfirmation();
+ expect(eventSpy).toHaveBeenCalledWith(
+ category,
+ TrackingActions.DELETE_PACKAGE,
+ expect.any(Object),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/components/packages_sort_spec.js b/spec/frontend/packages/list/components/packages_sort_spec.js
new file mode 100644
index 00000000000..ff3e8e19413
--- /dev/null
+++ b/spec/frontend/packages/list/components/packages_sort_spec.js
@@ -0,0 +1,92 @@
+import Vuex from 'vuex';
+import { GlSorting } from '@gitlab/ui';
+import { mount, createLocalVue } from '@vue/test-utils';
+import stubChildren from 'helpers/stub_children';
+import PackagesSort from '~/packages/list/components/packages_sort.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('packages_sort', () => {
+ let wrapper;
+ let store;
+ let sorting;
+ let sortingItems;
+
+ const GlSortingItem = { name: 'sorting-item-stub', template: '<div><slot></slot></div>' };
+
+ const findPackageListSorting = () => wrapper.find(GlSorting);
+ const findSortingItems = () => wrapper.findAll(GlSortingItem);
+
+ const createStore = isGroupPage => {
+ const state = {
+ config: {
+ isGroupPage,
+ },
+ sorting: {
+ orderBy: 'version',
+ sort: 'desc',
+ },
+ };
+ store = new Vuex.Store({
+ state,
+ });
+ store.dispatch = jest.fn();
+ };
+
+ const mountComponent = (isGroupPage = false) => {
+ createStore(isGroupPage);
+
+ wrapper = mount(PackagesSort, {
+ localVue,
+ store,
+ stubs: {
+ ...stubChildren(PackagesSort),
+ GlSortingItem,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+  describe('when in a project', () => {
+ beforeEach(() => {
+ mountComponent();
+ sorting = findPackageListSorting();
+ sortingItems = findSortingItems();
+ });
+
+ it('has all the sortable items', () => {
+ expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
+ });
+
+    it('on sort change sets sorting in vuex and emits an event', () => {
+ sorting.vm.$emit('sortDirectionChange');
+ expect(store.dispatch).toHaveBeenCalledWith('setSorting', { sort: 'asc' });
+ expect(wrapper.emitted('sort:changed')).toBeTruthy();
+ });
+
+    it('on sort item click sets sorting and emits an event', () => {
+ const item = sortingItems.at(0);
+ const { orderBy } = wrapper.vm.sortableFields[0];
+ item.vm.$emit('click');
+ expect(store.dispatch).toHaveBeenCalledWith('setSorting', { orderBy });
+ expect(wrapper.emitted('sort:changed')).toBeTruthy();
+ });
+ });
+
+  describe('when in a group', () => {
+ beforeEach(() => {
+ mountComponent(true);
+ sorting = findPackageListSorting();
+ sortingItems = findSortingItems();
+ });
+
+ it('has all the sortable items', () => {
+ expect(sortingItems).toHaveLength(wrapper.vm.sortableFields.length);
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/stores/actions_spec.js b/spec/frontend/packages/list/stores/actions_spec.js
new file mode 100644
index 00000000000..faa629cc01f
--- /dev/null
+++ b/spec/frontend/packages/list/stores/actions_spec.js
@@ -0,0 +1,240 @@
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import * as actions from '~/packages/list/stores/actions';
+import * as types from '~/packages/list/stores/mutation_types';
+import { MISSING_DELETE_PATH_ERROR, DELETE_PACKAGE_ERROR_MESSAGE } from '~/packages/list/constants';
+
+jest.mock('~/flash.js');
+jest.mock('~/api.js');
+
+describe('Actions Package list store', () => {
+ const headers = 'bar';
+ let mock;
+
+ beforeEach(() => {
+ Api.projectPackages = jest.fn().mockResolvedValue({ data: 'foo', headers });
+ Api.groupPackages = jest.fn().mockResolvedValue({ data: 'baz', headers });
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('requestPackagesList', () => {
+ const sorting = {
+ sort: 'asc',
+ orderBy: 'version',
+ };
+ it('should fetch the project packages list when isGroupPage is false', done => {
+ testAction(
+ actions.requestPackagesList,
+ undefined,
+ { config: { isGroupPage: false, resourceId: 1 }, sorting },
+ [],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'receivePackagesListSuccess', payload: { data: 'foo', headers } },
+ { type: 'setLoading', payload: false },
+ ],
+ () => {
+ expect(Api.projectPackages).toHaveBeenCalledWith(1, {
+ params: { page: 1, per_page: 20, sort: sorting.sort, order_by: sorting.orderBy },
+ });
+ done();
+ },
+ );
+ });
+
+ it('should fetch the group packages list when isGroupPage is true', done => {
+ testAction(
+ actions.requestPackagesList,
+ undefined,
+ { config: { isGroupPage: true, resourceId: 2 }, sorting },
+ [],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'receivePackagesListSuccess', payload: { data: 'baz', headers } },
+ { type: 'setLoading', payload: false },
+ ],
+ () => {
+ expect(Api.groupPackages).toHaveBeenCalledWith(2, {
+ params: { page: 1, per_page: 20, sort: sorting.sort, order_by: sorting.orderBy },
+ });
+ done();
+ },
+ );
+ });
+
+ it('should fetch packages of a certain type when selectedType is present', done => {
+ const packageType = 'maven';
+
+ testAction(
+ actions.requestPackagesList,
+ undefined,
+ {
+ config: { isGroupPage: false, resourceId: 1 },
+ sorting,
+ selectedType: { type: packageType },
+ },
+ [],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'receivePackagesListSuccess', payload: { data: 'foo', headers } },
+ { type: 'setLoading', payload: false },
+ ],
+ () => {
+ expect(Api.projectPackages).toHaveBeenCalledWith(1, {
+ params: {
+ page: 1,
+ per_page: 20,
+ sort: sorting.sort,
+ order_by: sorting.orderBy,
+ package_type: packageType,
+ },
+ });
+ done();
+ },
+ );
+ });
+
+ it('should create flash on API error', done => {
+ Api.projectPackages = jest.fn().mockRejectedValue();
+ testAction(
+ actions.requestPackagesList,
+ undefined,
+ { config: { isGroupPage: false, resourceId: 2 }, sorting },
+ [],
+ [{ type: 'setLoading', payload: true }, { type: 'setLoading', payload: false }],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+ });
+
+ describe('receivePackagesListSuccess', () => {
+ it('should set received packages', done => {
+ const data = 'foo';
+
+ testAction(
+ actions.receivePackagesListSuccess,
+ { data, headers },
+ null,
+ [
+ { type: types.SET_PACKAGE_LIST_SUCCESS, payload: data },
+ { type: types.SET_PAGINATION, payload: headers },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setInitialState', () => {
+    it('should commit SET_INITIAL_STATE', done => {
+ testAction(
+ actions.setInitialState,
+ '1',
+ null,
+ [{ type: types.SET_INITIAL_STATE, payload: '1' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setLoading', () => {
+    it('should commit SET_MAIN_LOADING', done => {
+ testAction(
+ actions.setLoading,
+ true,
+ null,
+ [{ type: types.SET_MAIN_LOADING, payload: true }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('requestDeletePackage', () => {
+ const payload = {
+ _links: {
+ delete_api_path: 'foo',
+ },
+ };
+ it('should perform a delete operation on _links.delete_api_path', done => {
+ mock.onDelete(payload._links.delete_api_path).replyOnce(200);
+ Api.projectPackages = jest.fn().mockResolvedValue({ data: 'foo' });
+
+ testAction(
+ actions.requestDeletePackage,
+ payload,
+ { pagination: { page: 1 } },
+ [],
+ [
+ { type: 'setLoading', payload: true },
+ { type: 'requestPackagesList', payload: { page: 1 } },
+ ],
+ done,
+ );
+ });
+
+    it('should stop the loading and call createFlash on API error', done => {
+ mock.onDelete(payload._links.delete_api_path).replyOnce(400);
+ testAction(
+ actions.requestDeletePackage,
+ payload,
+ null,
+ [],
+ [{ type: 'setLoading', payload: true }, { type: 'setLoading', payload: false }],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
+ });
+
+ it.each`
+ property | actionPayload
+ ${'_links'} | ${{}}
+ ${'delete_api_path'} | ${{ _links: {} }}
+    `('should reject and call createFlash when $property is missing', ({ actionPayload }, done) => {
+ testAction(actions.requestDeletePackage, actionPayload, null, [], []).catch(e => {
+ expect(e).toEqual(new Error(MISSING_DELETE_PATH_ERROR));
+ expect(createFlash).toHaveBeenCalledWith(DELETE_PACKAGE_ERROR_MESSAGE);
+ done();
+ });
+ });
+ });
+
+ describe('setSorting', () => {
+ it('should commit SET_SORTING', done => {
+ testAction(
+ actions.setSorting,
+ 'foo',
+ null,
+ [{ type: types.SET_SORTING, payload: 'foo' }],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setFilter', () => {
+ it('should commit SET_FILTER', done => {
+ testAction(
+ actions.setFilter,
+ 'foo',
+ null,
+ [{ type: types.SET_FILTER, payload: 'foo' }],
+ [],
+ done,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/stores/getters_spec.js b/spec/frontend/packages/list/stores/getters_spec.js
new file mode 100644
index 00000000000..080bbc21d9f
--- /dev/null
+++ b/spec/frontend/packages/list/stores/getters_spec.js
@@ -0,0 +1,36 @@
+import getList from '~/packages/list/stores/getters';
+import { packageList } from '../../mock_data';
+
+describe('Getters registry list store', () => {
+ let state;
+
+ const setState = ({ isGroupPage = false } = {}) => {
+ state = {
+ packages: packageList,
+ config: {
+ isGroupPage,
+ },
+ };
+ };
+
+ beforeEach(() => setState());
+
+ afterEach(() => {
+ state = null;
+ });
+
+ describe('getList', () => {
+ it('returns a list of packages', () => {
+ const result = getList(state);
+
+ expect(result).toHaveLength(packageList.length);
+ expect(result[0].name).toBe('Test package');
+ });
+
+ it('adds projectPathName', () => {
+ const result = getList(state);
+
+ expect(result[0].projectPathName).toMatchInlineSnapshot(`"foo / bar / baz"`);
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/stores/mutations_spec.js b/spec/frontend/packages/list/stores/mutations_spec.js
new file mode 100644
index 00000000000..563a3dabbb3
--- /dev/null
+++ b/spec/frontend/packages/list/stores/mutations_spec.js
@@ -0,0 +1,95 @@
+import mutations from '~/packages/list/stores/mutations';
+import * as types from '~/packages/list/stores/mutation_types';
+import createState from '~/packages/list/stores/state';
+import * as commonUtils from '~/lib/utils/common_utils';
+import { npmPackage, mavenPackage } from '../../mock_data';
+
+describe('Mutations Registry Store', () => {
+ let mockState;
+ beforeEach(() => {
+ mockState = createState();
+ });
+
+ describe('SET_INITIAL_STATE', () => {
+ it('should set the initial state', () => {
+ const config = {
+ resourceId: '1',
+ pageType: 'groups',
+ userCanDelete: '',
+ emptyListIllustration: 'foo',
+ emptyListHelpUrl: 'baz',
+ comingSoonJson: '{ "project_path": "gitlab-org/gitlab-test" }',
+ };
+
+ const expectedState = {
+ ...mockState,
+ config: {
+ ...config,
+ isGroupPage: true,
+ canDestroyPackage: true,
+ },
+ };
+ mutations[types.SET_INITIAL_STATE](mockState, config);
+
+ expect(mockState.projectId).toEqual(expectedState.projectId);
+ });
+ });
+
+ describe('SET_PACKAGE_LIST_SUCCESS', () => {
+ it('should set a packages list', () => {
+ const payload = [npmPackage, mavenPackage];
+ const expectedState = { ...mockState, packages: payload };
+ mutations[types.SET_PACKAGE_LIST_SUCCESS](mockState, payload);
+
+ expect(mockState.packages).toEqual(expectedState.packages);
+ });
+ });
+
+ describe('SET_MAIN_LOADING', () => {
+ it('should set main loading', () => {
+ mutations[types.SET_MAIN_LOADING](mockState, true);
+
+ expect(mockState.isLoading).toEqual(true);
+ });
+ });
+
+ describe('SET_PAGINATION', () => {
+ const mockPagination = { perPage: 10, page: 1 };
+ beforeEach(() => {
+ commonUtils.normalizeHeaders = jest.fn().mockReturnValue('baz');
+ commonUtils.parseIntPagination = jest.fn().mockReturnValue(mockPagination);
+ });
+ it('should set a parsed pagination', () => {
+ mutations[types.SET_PAGINATION](mockState, 'foo');
+ expect(commonUtils.normalizeHeaders).toHaveBeenCalledWith('foo');
+ expect(commonUtils.parseIntPagination).toHaveBeenCalledWith('baz');
+ expect(mockState.pagination).toEqual(mockPagination);
+ });
+ });
+
+ describe('SET_SORTING', () => {
+ it('should merge the sorting object with sort value', () => {
+ mutations[types.SET_SORTING](mockState, { sort: 'desc' });
+ expect(mockState.sorting).toEqual({ ...mockState.sorting, sort: 'desc' });
+ });
+
+ it('should merge the sorting object with order_by value', () => {
+ mutations[types.SET_SORTING](mockState, { orderBy: 'foo' });
+ expect(mockState.sorting).toEqual({ ...mockState.sorting, orderBy: 'foo' });
+ });
+ });
+
+ describe('SET_SELECTED_TYPE', () => {
+ it('should set the selected type', () => {
+ mutations[types.SET_SELECTED_TYPE](mockState, { type: 'maven' });
+ expect(mockState.selectedType).toEqual({ type: 'maven' });
+ });
+ });
+
+ describe('SET_FILTER', () => {
+ it('should set the filter query', () => {
+ mutations[types.SET_FILTER](mockState, 'foo');
+ expect(mockState.filterQuery).toEqual('foo');
+ });
+ });
+});
diff --git a/spec/frontend/packages/list/utils_spec.js b/spec/frontend/packages/list/utils_spec.js
new file mode 100644
index 00000000000..5bcc3784752
--- /dev/null
+++ b/spec/frontend/packages/list/utils_spec.js
@@ -0,0 +1,39 @@
+import { getNewPaginationPage } from '~/packages/list/utils';
+
+describe('Packages list utils', () => {
+  describe('getNewPaginationPage', () => {
+    it('returns the current page when total items exceed the page size', () => {
+ expect(getNewPaginationPage(2, 20, 21)).toBe(2);
+ });
+
+    it('returns the previous page when total items are lower than or equal to the page size', () => {
+ expect(getNewPaginationPage(2, 20, 20)).toBe(1);
+ });
+
+ it('returns the first page when totalItems is lower than or equal to perPage', () => {
+ expect(getNewPaginationPage(4, 20, 20)).toBe(1);
+ });
+
+ describe('works when a different perPage is used', () => {
+ it('returns the current page', () => {
+ expect(getNewPaginationPage(2, 10, 11)).toBe(2);
+ });
+
+ it('returns the previous page', () => {
+ expect(getNewPaginationPage(2, 10, 10)).toBe(1);
+ });
+ });
+
+ describe.each`
+ currentPage | totalItems | expectedResult
+ ${1} | ${20} | ${1}
+ ${2} | ${20} | ${1}
+ ${3} | ${40} | ${2}
+ ${4} | ${60} | ${3}
+    `(`works across numerous pages`, ({ currentPage, totalItems, expectedResult }) => {
+      it(`when currentPage is ${currentPage} returns the previous page ${expectedResult}`, () => {
+ expect(getNewPaginationPage(currentPage, 20, totalItems)).toBe(expectedResult);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/mock_data.js b/spec/frontend/packages/mock_data.js
new file mode 100644
index 00000000000..86205b0744c
--- /dev/null
+++ b/spec/frontend/packages/mock_data.js
@@ -0,0 +1,170 @@
+const _links = {
+ web_path: 'foo',
+ delete_api_path: 'bar',
+};
+
+export const mockPipelineInfo = {
+ id: 1,
+ ref: 'branch-name',
+ sha: 'sha-baz',
+ user: {
+ name: 'foo',
+ },
+ project: {
+ name: 'foo-project',
+ web_url: 'foo-project-link',
+ commit_url: 'foo-commit-link',
+ pipeline_url: 'foo-pipeline-link',
+ },
+ created_at: '2015-12-10',
+};
+
+export const mavenPackage = {
+ created_at: '2015-12-10',
+ id: 1,
+ maven_metadatum: {
+ app_group: 'com.test.app',
+ app_name: 'test-app',
+ app_version: '1.0-SNAPSHOT',
+ },
+ name: 'Test package',
+ package_type: 'maven',
+ project_path: 'foo/bar/baz',
+ project_id: 1,
+ updated_at: '2015-12-10',
+ version: '1.0.0',
+ _links,
+};
+
+export const mavenFiles = [
+ {
+ created_at: '2015-12-10',
+ file_name: 'File one',
+ id: 1,
+ size: 100,
+ download_path: '/-/package_files/1/download',
+ },
+ {
+ created_at: '2015-12-10',
+ file_name: 'File two',
+ id: 2,
+ size: 200,
+ download_path: '/-/package_files/2/download',
+ },
+];
+
+export const npmPackage = {
+ created_at: '2015-12-10',
+ id: 2,
+ name: '@Test/package',
+ package_type: 'npm',
+ project_path: 'foo/bar/baz',
+ project_id: 1,
+ updated_at: '2015-12-10',
+ version: '',
+ versions: [],
+ _links,
+ pipeline: mockPipelineInfo,
+};
+
+export const npmFiles = [
+ {
+ created_at: '2015-12-10',
+ file_name: '@test/test-package-1.0.0.tgz',
+ id: 2,
+ size: 200,
+ download_path: '/-/package_files/2/download',
+ },
+];
+
+export const conanPackage = {
+ conan_metadatum: {
+ package_channel: 'stable',
+ package_username: 'conan+conan-package',
+ },
+ created_at: '2015-12-10',
+ id: 3,
+ name: 'conan-package',
+ project_path: 'foo/bar/baz',
+ package_files: [],
+ package_type: 'conan',
+ project_id: 1,
+ recipe: 'conan-package/1.0.0@conan+conan-package/stable',
+ updated_at: '2015-12-10',
+ version: '1.0.0',
+ _links,
+};
+
+export const dependencyLinks = {
+ withoutFramework: { name: 'Moqi', version_pattern: '2.5.6' },
+ withoutVersion: { name: 'Castle.Core', version_pattern: '' },
+ fullLink: {
+ name: 'Test.Dependency',
+ version_pattern: '2.3.7',
+ target_framework: '.NETStandard2.0',
+ },
+ anotherFullLink: {
+ name: 'Newtonsoft.Json',
+ version_pattern: '12.0.3',
+ target_framework: '.NETStandard2.0',
+ },
+};
+
+export const nugetPackage = {
+ created_at: '2015-12-10',
+ id: 4,
+ name: 'NugetPackage1',
+ package_files: [],
+ package_type: 'nuget',
+ project_id: 1,
+ tags: [],
+ updated_at: '2015-12-10',
+ version: '1.0.0',
+ dependency_links: Object.values(dependencyLinks),
+ nuget_metadatum: {
+ icon_url: 'fake-icon',
+ project_url: 'project-foo-url',
+ license_url: 'license-foo-url',
+ },
+};
+
+export const pypiPackage = {
+ created_at: '2015-12-10',
+ id: 5,
+ name: 'PyPiPackage',
+ package_files: [],
+ package_type: 'pypi',
+ project_id: 1,
+ tags: [],
+ updated_at: '2015-12-10',
+ version: '1.0.0',
+};
+
+export const composerPackage = {
+ created_at: '2015-12-10',
+ id: 5,
+ name: 'ComposerPackage',
+ package_files: [],
+ package_type: 'composer',
+ project_id: 1,
+ tags: [],
+ updated_at: '2015-12-10',
+ version: '1.0.0',
+};
+
+export const mockTags = [
+ {
+ name: 'foo-1',
+ },
+ {
+ name: 'foo-2',
+ },
+ {
+ name: 'foo-3',
+ },
+ {
+ name: 'foo-4',
+ },
+];
+
+export const packageList = [mavenPackage, { ...npmPackage, tags: mockTags }, conanPackage];
diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
new file mode 100644
index 00000000000..eab8d7b67cc
--- /dev/null
+++ b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
@@ -0,0 +1,101 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`packages_list_row renders 1`] = `
+<div
+ class="gl-responsive-table-row"
+ data-qa-selector="packages-row"
+>
+ <div
+ class="table-section section-50 d-flex flex-md-column justify-content-between flex-wrap"
+ >
+ <div
+ class="d-flex align-items-center mr-2"
+ >
+ <gl-link-stub
+ class="text-dark font-weight-bold mb-md-1"
+ data-qa-selector="package_link"
+ href="foo"
+ >
+
+ Test package
+
+ </gl-link-stub>
+
+ <!---->
+ </div>
+
+ <div
+ class="d-flex text-secondary text-truncate mt-md-2"
+ >
+ <span>
+ 1.0.0
+ </span>
+
+ <!---->
+
+ <div
+ class="d-flex align-items-center"
+ >
+ <gl-icon-stub
+ class="text-secondary ml-2 mr-1"
+ name="review-list"
+ size="16"
+ />
+
+ <gl-link-stub
+ class="text-secondary"
+ data-testid="packages-row-project"
+ href="/foo/bar/baz"
+ >
+
+ </gl-link-stub>
+ </div>
+
+ <div
+ class="d-flex align-items-center"
+ data-testid="package-type"
+ >
+ <gl-icon-stub
+ class="text-secondary ml-2 mr-1"
+ name="package"
+ size="16"
+ />
+
+ <span>
+ Maven
+ </span>
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="table-section d-flex flex-md-column justify-content-between align-items-md-end section-40"
+ >
+ <publish-method-stub
+ packageentity="[object Object]"
+ />
+
+ <div
+ class="text-secondary order-0 order-md-1 mt-md-2"
+ >
+ <gl-sprintf-stub
+ message="Created %{timestamp}"
+ />
+ </div>
+ </div>
+
+ <div
+ class="table-section section-10 d-flex justify-content-end"
+ >
+ <gl-button-stub
+ aria-label="Remove package"
+ category="primary"
+ data-testid="action-delete"
+ icon="remove"
+ size="medium"
+ title="Remove package"
+ variant="danger"
+ />
+ </div>
+</div>
+`;
diff --git a/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
new file mode 100644
index 00000000000..5ecca63d41d
--- /dev/null
+++ b/spec/frontend/packages/shared/components/__snapshots__/publish_method_spec.js.snap
@@ -0,0 +1,39 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`publish_method renders 1`] = `
+<div
+ class="d-flex align-items-center text-secondary order-1 order-md-0 mb-md-1"
+>
+ <gl-icon-stub
+ class="mr-1"
+ name="git-merge"
+ size="16"
+ />
+
+ <strong
+ class="mr-1 text-dark"
+ >
+ branch-name
+ </strong>
+
+ <gl-icon-stub
+ class="mr-1"
+ name="commit"
+ size="16"
+ />
+
+ <gl-link-stub
+ class="mr-1"
+ href="../commit/sha-baz"
+ >
+ sha-baz
+ </gl-link-stub>
+
+ <clipboard-button-stub
+ cssclass="border-0 text-secondary py-0 px-1"
+ text="sha-baz"
+ title="Copy commit SHA"
+ tooltipplacement="top"
+ />
+</div>
+`;
diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages/shared/components/package_list_row_spec.js
new file mode 100644
index 00000000000..c0ae972d519
--- /dev/null
+++ b/spec/frontend/packages/shared/components/package_list_row_spec.js
@@ -0,0 +1,106 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
+import PackageTags from '~/packages/shared/components/package_tags.vue';
+import { packageList } from '../../mock_data';
+
+describe('packages_list_row', () => {
+ let wrapper;
+ let store;
+
+ const [packageWithoutTags, packageWithTags] = packageList;
+
+ const findPackageTags = () => wrapper.find(PackageTags);
+ const findProjectLink = () => wrapper.find('[data-testid="packages-row-project"]');
+ const findDeleteButton = () => wrapper.find('[data-testid="action-delete"]');
+ const findPackageType = () => wrapper.find('[data-testid="package-type"]');
+
+ const mountComponent = ({
+ isGroup = false,
+ packageEntity = packageWithoutTags,
+ shallow = true,
+ showPackageType = true,
+ disableDelete = false,
+ } = {}) => {
+ const mountFunc = shallow ? shallowMount : mount;
+
+ wrapper = mountFunc(PackagesListRow, {
+ store,
+ propsData: {
+ packageLink: 'foo',
+ packageEntity,
+ isGroup,
+ showPackageType,
+ disableDelete,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders', () => {
+ mountComponent();
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('tags', () => {
+ it('renders package tags when a package has tags', () => {
+ mountComponent({ isGroup: false, packageEntity: packageWithTags });
+
+ expect(findPackageTags().exists()).toBe(true);
+ });
+
+ it('does not render when there are no tags', () => {
+ mountComponent();
+
+ expect(findPackageTags().exists()).toBe(false);
+ });
+ });
+
+  describe('when it is a group', () => {
+ beforeEach(() => {
+ mountComponent({ isGroup: true });
+ });
+
+ it('has project field', () => {
+ expect(findProjectLink().exists()).toBe(true);
+ });
+ });
+
+ describe('showPackageType', () => {
+ it('shows the type when set', () => {
+ mountComponent();
+
+ expect(findPackageType().exists()).toBe(true);
+ });
+
+ it('does not show the type when not set', () => {
+ mountComponent({ showPackageType: false });
+
+ expect(findPackageType().exists()).toBe(false);
+ });
+ });
+
+ describe('deleteAvailable', () => {
+ it('does not show when not set', () => {
+ mountComponent({ disableDelete: true });
+
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+ });
+
+ describe('delete event', () => {
+ beforeEach(() => mountComponent({ packageEntity: packageWithoutTags, shallow: false }));
+
+ it('emits the packageToDelete event when the delete button is clicked', () => {
+ findDeleteButton().trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('packageToDelete')).toBeTruthy();
+ expect(wrapper.emitted('packageToDelete')[0]).toEqual([packageWithoutTags]);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/packages/shared/components/package_tags_spec.js b/spec/frontend/packages/shared/components/package_tags_spec.js
new file mode 100644
index 00000000000..cc49a9a9244
--- /dev/null
+++ b/spec/frontend/packages/shared/components/package_tags_spec.js
@@ -0,0 +1,115 @@
+import { mount } from '@vue/test-utils';
+import PackageTags from '~/packages/shared/components/package_tags.vue';
+import { mockTags } from '../../mock_data';
+
+describe('PackageTags', () => {
+ let wrapper;
+
+ function createComponent(tags = [], props = {}) {
+ const propsData = {
+ tags,
+ ...props,
+ };
+
+ wrapper = mount(PackageTags, {
+ propsData,
+ });
+ }
+
+ const tagLabel = () => wrapper.find('[data-testid="tagLabel"]');
+ const tagBadges = () => wrapper.findAll('[data-testid="tagBadge"]');
+ const moreBadge = () => wrapper.find('[data-testid="moreBadge"]');
+
+ afterEach(() => {
+ if (wrapper) wrapper.destroy();
+ });
+
+ describe('tag label', () => {
+ it('shows the tag label by default', () => {
+ createComponent();
+
+ expect(tagLabel().exists()).toBe(true);
+ });
+
+ it('hides when hideLabel prop is set to true', () => {
+ createComponent(mockTags, { hideLabel: true });
+
+ expect(tagLabel().exists()).toBe(false);
+ });
+ });
+
+ it('renders the correct number of tags', () => {
+ createComponent(mockTags.slice(0, 2));
+
+ expect(tagBadges()).toHaveLength(2);
+ expect(moreBadge().exists()).toBe(false);
+ });
+
+ it('does not render more than the configured tagDisplayLimit', () => {
+ createComponent(mockTags);
+
+ expect(tagBadges()).toHaveLength(2);
+ });
+
+ it('renders the more tags badge if there are more than the configured limit', () => {
+ createComponent(mockTags);
+
+ expect(tagBadges()).toHaveLength(2);
+ expect(moreBadge().exists()).toBe(true);
+ expect(moreBadge().text()).toContain('2');
+ });
+
+ it('renders the configured tagDisplayLimit when set in props', () => {
+ createComponent(mockTags, { tagDisplayLimit: 1 });
+
+ expect(tagBadges()).toHaveLength(1);
+ expect(moreBadge().exists()).toBe(true);
+ expect(moreBadge().text()).toContain('3');
+ });
+
+ describe('tagBadgeStyle', () => {
+ const defaultStyle = ['badge', 'badge-info', 'gl-display-none'];
+
+ it('shows tag badge when there is only one', () => {
+ createComponent([mockTags[0]]);
+
+ const expectedStyle = [...defaultStyle, 'gl-display-flex', 'gl-ml-3'];
+
+ expect(
+ tagBadges()
+ .at(0)
+ .classes(),
+ ).toEqual(expect.arrayContaining(expectedStyle));
+ });
+
+    it('shows tag badge for medium or higher resolutions', () => {
+ createComponent(mockTags);
+
+ const expectedStyle = [...defaultStyle, 'd-md-flex'];
+
+ expect(
+ tagBadges()
+ .at(1)
+ .classes(),
+ ).toEqual(expect.arrayContaining(expectedStyle));
+ });
+
+    it('correctly prepends left and appends right margins when there is more than one tag', () => {
+ createComponent(mockTags, {
+ tagDisplayLimit: 4,
+ });
+
+ const expectedStyleWithoutAppend = [...defaultStyle, 'd-md-flex'];
+ const expectedStyleWithAppend = [...expectedStyleWithoutAppend, 'gl-mr-2'];
+
+ const allBadges = tagBadges();
+
+ expect(allBadges.at(0).classes()).toEqual(
+ expect.arrayContaining([...expectedStyleWithAppend, 'gl-ml-3']),
+ );
+ expect(allBadges.at(1).classes()).toEqual(expect.arrayContaining(expectedStyleWithAppend));
+ expect(allBadges.at(2).classes()).toEqual(expect.arrayContaining(expectedStyleWithAppend));
+ expect(allBadges.at(3).classes()).toEqual(expect.arrayContaining(expectedStyleWithoutAppend));
+ });
+ });
+});
diff --git a/spec/frontend/packages/shared/components/packages_list_loader_spec.js b/spec/frontend/packages/shared/components/packages_list_loader_spec.js
new file mode 100644
index 00000000000..c8c2e2a4ba4
--- /dev/null
+++ b/spec/frontend/packages/shared/components/packages_list_loader_spec.js
@@ -0,0 +1,42 @@
+import { mount } from '@vue/test-utils';
+import PackagesListLoader from '~/packages/shared/components/packages_list_loader.vue';
+
+describe('PackagesListLoader', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(PackagesListLoader, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const getShapes = () => wrapper.vm.desktopShapes;
+ const findSquareButton = () => wrapper.find({ ref: 'button-loader' });
+
+ beforeEach(createComponent);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when used for projects', () => {
+    it('should return 5 rects with the last one being a square', () => {
+ expect(getShapes()).toHaveLength(5);
+ expect(findSquareButton().exists()).toBe(true);
+ });
+ });
+
+ describe('when used for groups', () => {
+ beforeEach(() => {
+ createComponent({ isGroup: true });
+ });
+
+ it('should return 5 rects with no square', () => {
+ expect(getShapes()).toHaveLength(5);
+ expect(findSquareButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/packages/shared/components/publish_method_spec.js b/spec/frontend/packages/shared/components/publish_method_spec.js
new file mode 100644
index 00000000000..bb9287c1204
--- /dev/null
+++ b/spec/frontend/packages/shared/components/publish_method_spec.js
@@ -0,0 +1,50 @@
+import { shallowMount } from '@vue/test-utils';
+import PublishMethod from '~/packages/shared/components/publish_method.vue';
+import { packageList } from '../../mock_data';
+
+describe('publish_method', () => {
+ let wrapper;
+
+ const [packageWithoutPipeline, packageWithPipeline] = packageList;
+
+ const findPipelineRef = () => wrapper.find({ ref: 'pipeline-ref' });
+ const findPipelineSha = () => wrapper.find({ ref: 'pipeline-sha' });
+ const findManualPublish = () => wrapper.find({ ref: 'manual-ref' });
+
+ const mountComponent = (packageEntity = {}, isGroup = false) => {
+ wrapper = shallowMount(PublishMethod, {
+ propsData: {
+ packageEntity,
+ isGroup,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('renders', () => {
+ mountComponent(packageWithPipeline);
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ describe('pipeline information', () => {
+ it('displays branch and commit when pipeline info exists', () => {
+ mountComponent(packageWithPipeline);
+
+ expect(findPipelineRef().exists()).toBe(true);
+ expect(findPipelineSha().exists()).toBe(true);
+ });
+
+ it('does not show any pipeline details when no information exists', () => {
+ mountComponent(packageWithoutPipeline);
+
+ expect(findPipelineRef().exists()).toBe(false);
+ expect(findPipelineSha().exists()).toBe(false);
+ expect(findManualPublish().exists()).toBe(true);
+ expect(findManualPublish().text()).toBe('Manually Published');
+ });
+ });
+});
diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js
new file mode 100644
index 00000000000..1fe90a4827f
--- /dev/null
+++ b/spec/frontend/packages/shared/utils_spec.js
@@ -0,0 +1,66 @@
+import {
+ packageTypeToTrackCategory,
+ beautifyPath,
+ getPackageTypeLabel,
+ getCommitLink,
+} from '~/packages/shared/utils';
+import { PackageType, TrackingCategories } from '~/packages/shared/constants';
+import { packageList } from '../mock_data';
+
+describe('Packages shared utils', () => {
+ describe('packageTypeToTrackCategory', () => {
+    it('prepends UI to the package category', () => {
+ expect(packageTypeToTrackCategory()).toMatchInlineSnapshot(`"UI::undefined"`);
+ });
+
+ it.each(Object.keys(PackageType))('returns a correct category string for %s', packageKey => {
+ const packageName = PackageType[packageKey];
+ expect(packageTypeToTrackCategory(packageName)).toBe(
+ `UI::${TrackingCategories[packageName]}`,
+ );
+ });
+ });
+
+ describe('beautifyPath', () => {
+ it('returns a string with spaces around /', () => {
+ expect(beautifyPath('foo/bar')).toBe('foo / bar');
+ });
+ it('does not fail for empty string', () => {
+ expect(beautifyPath()).toBe('');
+ });
+ });
+
+ describe('getPackageTypeLabel', () => {
+ describe.each`
+ packageType | expectedResult
+ ${'conan'} | ${'Conan'}
+ ${'maven'} | ${'Maven'}
+ ${'npm'} | ${'NPM'}
+ ${'nuget'} | ${'NuGet'}
+ ${'pypi'} | ${'PyPi'}
+ ${'composer'} | ${'Composer'}
+ ${'foo'} | ${null}
+ `(`package type`, ({ packageType, expectedResult }) => {
+ it(`${packageType} should show as ${expectedResult}`, () => {
+ expect(getPackageTypeLabel(packageType)).toBe(expectedResult);
+ });
+ });
+ });
+
+ describe('getCommitLink', () => {
+ it('returns a relative link when isGroup is false', () => {
+ const link = getCommitLink(packageList[0], false);
+
+ expect(link).toContain('../commit');
+ });
+
+ describe('when isGroup is true', () => {
+ it('returns an absolute link matching project path', () => {
+ const mavenPackage = packageList[0];
+ const link = getCommitLink(mavenPackage, true);
+
+ expect(link).toContain(`/${mavenPackage.project_path}/commit`);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pager_spec.js b/spec/frontend/pager_spec.js
index 47056c2804c..8b60f872bfd 100644
--- a/spec/frontend/pager_spec.js
+++ b/spec/frontend/pager_spec.js
@@ -1,9 +1,9 @@
import $ from 'jquery';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import Pager from '~/pager';
import { removeParams } from '~/lib/utils/url_utility';
-import { TEST_HOST } from 'jest/helpers/test_constants';
jest.mock('~/lib/utils/url_utility', () => ({
removeParams: jest.fn().mockName('removeParams'),
diff --git a/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js b/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
index fb7a07b7bc7..c662fb7ba4a 100644
--- a/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
+++ b/spec/frontend/pages/admin/jobs/index/components/stop_jobs_modal_spec.js
@@ -1,9 +1,9 @@
import Vue from 'vue';
-import { redirectTo } from '~/lib/utils/url_utility';
import mountComponent from 'helpers/vue_mount_component_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
+import { redirectTo } from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
import stopJobsModal from '~/pages/admin/jobs/index/components/stop_jobs_modal.vue';
-import { TEST_HOST } from 'jest/helpers/test_constants';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index 82589e5147c..fc37a545511 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -37,29 +37,35 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
value=""
/>
</form>
- <gl-deprecated-button-stub
- size="md"
- variant="secondary"
+ <gl-button-stub
+ category="primary"
+ icon=""
+ size="medium"
+ variant="default"
>
Cancel
- </gl-deprecated-button-stub>
+ </gl-button-stub>
- <gl-deprecated-button-stub
+ <gl-button-stub
+ category="primary"
disabled="true"
- size="md"
+ icon=""
+ size="medium"
variant="warning"
>
secondaryAction
- </gl-deprecated-button-stub>
+ </gl-button-stub>
- <gl-deprecated-button-stub
+ <gl-button-stub
+ category="primary"
disabled="true"
- size="md"
+ icon=""
+ size="medium"
variant="danger"
>
action
- </gl-deprecated-button-stub>
+ </gl-button-stub>
</div>
`;
diff --git a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
index 16b0bd305cd..3efefa8137f 100644
--- a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
+++ b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton, GlFormInput } from '@gitlab/ui';
+import { GlButton, GlFormInput } from '@gitlab/ui';
import DeleteUserModal from '~/pages/admin/users/components/delete_user_modal.vue';
import ModalStub from './stubs/modal_stub';
@@ -13,7 +13,7 @@ describe('User Operation confirmation modal', () => {
const findButton = variant =>
wrapper
- .findAll(GlDeprecatedButton)
+ .findAll(GlButton)
.filter(w => w.attributes('variant') === variant)
.at(0);
const findForm = () => wrapper.find('form');
diff --git a/spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js b/spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js
new file mode 100644
index 00000000000..b3a297ac2c5
--- /dev/null
+++ b/spec/frontend/pages/dashboard/projects/index/components/customize_homepage_banner_spec.js
@@ -0,0 +1,50 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlBanner } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import CustomizeHomepageBanner from '~/pages/dashboard/projects/index/components/customize_homepage_banner.vue';
+import axios from '~/lib/utils/axios_utils';
+
+const svgPath = '/illustrations/background';
+const provide = {
+ svgPath,
+ preferencesBehaviorPath: 'some/behavior/path',
+ calloutsPath: 'call/out/path',
+ calloutsFeatureId: 'some-feature-id',
+};
+
+const createComponent = () => {
+ return shallowMount(CustomizeHomepageBanner, { provide });
+};
+
+describe('CustomizeHomepageBanner', () => {
+ let mockAxios;
+ let wrapper;
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ mockAxios.restore();
+ });
+
+ it('should render the banner when not dismissed', () => {
+ expect(wrapper.contains(GlBanner)).toBe(true);
+ });
+
+ it('should close the banner when dismiss is clicked', async () => {
+ mockAxios.onPost(provide.calloutsPath).replyOnce(200);
+ expect(wrapper.contains(GlBanner)).toBe(true);
+ wrapper.find(GlBanner).vm.$emit('close');
+
+ await wrapper.vm.$nextTick();
+ expect(wrapper.contains(GlBanner)).toBe(false);
+ });
+
+ it('includes the body text from options', () => {
+ expect(wrapper.html()).toContain(wrapper.vm.$options.i18n.body);
+ });
+});
diff --git a/spec/frontend/pages/labels/components/promote_label_modal_spec.js b/spec/frontend/pages/labels/components/promote_label_modal_spec.js
index d4aabcc02f4..1fa12cf1365 100644
--- a/spec/frontend/pages/labels/components/promote_label_modal_spec.js
+++ b/spec/frontend/pages/labels/components/promote_label_modal_spec.js
@@ -1,9 +1,9 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import promoteLabelModal from '~/pages/projects/labels/components/promote_label_modal.vue';
import eventHub from '~/pages/projects/labels/event_hub';
import axios from '~/lib/utils/axios_utils';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Promote label modal', () => {
let vm;
diff --git a/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js b/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js
index c376cf02594..1d9a964c3c3 100644
--- a/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js
+++ b/spec/frontend/pages/milestones/shared/components/delete_milestone_modal_spec.js
@@ -1,10 +1,10 @@
import Vue from 'vue';
-import { redirectTo } from '~/lib/utils/url_utility';
import mountComponent from 'helpers/vue_mount_component_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
+import { redirectTo } from '~/lib/utils/url_utility';
import axios from '~/lib/utils/axios_utils';
import deleteMilestoneModal from '~/pages/milestones/shared/components/delete_milestone_modal.vue';
import eventHub from '~/pages/milestones/shared/event_hub';
-import { TEST_HOST } from 'jest/helpers/test_constants';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
diff --git a/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js b/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js
index 87d32a67d47..e8a6e259837 100644
--- a/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js
+++ b/spec/frontend/pages/milestones/shared/components/promote_milestone_modal_spec.js
@@ -1,9 +1,9 @@
import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import promoteMilestoneModal from '~/pages/milestones/shared/components/promote_milestone_modal.vue';
import eventHub from '~/pages/milestones/shared/event_hub';
import axios from '~/lib/utils/axios_utils';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('Promote milestone modal', () => {
let vm;
diff --git a/spec/frontend/pages/profiles/show/emoji_menu_spec.js b/spec/frontend/pages/profiles/show/emoji_menu_spec.js
index 00320fb4601..08fc0b92424 100644
--- a/spec/frontend/pages/profiles/show/emoji_menu_spec.js
+++ b/spec/frontend/pages/profiles/show/emoji_menu_spec.js
@@ -55,7 +55,7 @@ describe('EmojiMenu', () => {
});
});
- it('does not make an axios requst', done => {
+ it('does not make an axios request', done => {
jest.spyOn(axios, 'request').mockReturnValue();
emojiMenu.addAward(dummyVotesBlock(), dummyAwardUrl, dummyEmoji, false, () => {
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
index 979dff78eba..2ec608569e3 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_groups_list_spec.js
@@ -1,14 +1,14 @@
import AxiosMockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
import { nextTick } from 'vue';
-import createFlash from '~/flash';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import ForkGroupsList from '~/pages/projects/forks/new/components/fork_groups_list.vue';
import ForkGroupsListItem from '~/pages/projects/forks/new/components/fork_groups_list_item.vue';
-import waitForPromises from 'helpers/wait_for_promises';
-jest.mock('~/flash', () => jest.fn());
+jest.mock('~/flash');
describe('Fork groups list component', () => {
let wrapper;
diff --git a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
index 94089ea922b..211f4ea20f5 100644
--- a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
+++ b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
@@ -9,10 +9,10 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
<!---->
- <gl-dropdown-stub
+ <gl-deprecated-dropdown-stub
text="rspec"
>
- <gl-dropdown-item-stub
+ <gl-deprecated-dropdown-item-stub
value="rspec"
>
<div
@@ -32,8 +32,8 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
</span>
</div>
- </gl-dropdown-item-stub>
- <gl-dropdown-item-stub
+ </gl-deprecated-dropdown-item-stub>
+ <gl-deprecated-dropdown-item-stub
value="cypress"
>
<div
@@ -49,8 +49,8 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
</span>
</div>
- </gl-dropdown-item-stub>
- <gl-dropdown-item-stub
+ </gl-deprecated-dropdown-item-stub>
+ <gl-deprecated-dropdown-item-stub
value="karma"
>
<div
@@ -66,8 +66,8 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
</span>
</div>
- </gl-dropdown-item-stub>
- </gl-dropdown-stub>
+ </gl-deprecated-dropdown-item-stub>
+ </gl-deprecated-dropdown-stub>
</div>
<gl-area-chart-stub
diff --git a/spec/frontend/pages/projects/graphs/code_coverage_spec.js b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
index 30c7ff78c6e..54a080fb62b 100644
--- a/spec/frontend/pages/projects/graphs/code_coverage_spec.js
+++ b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
@@ -1,12 +1,12 @@
import MockAdapter from 'axios-mock-adapter';
import { shallowMount } from '@vue/test-utils';
-import { GlAlert, GlIcon, GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlAlert, GlIcon, GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import { GlAreaChart } from '@gitlab/ui/dist/charts';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import CodeCoverage from '~/pages/projects/graphs/components/code_coverage.vue';
import { codeCoverageMockData, sortedDataByDates } from './mock_data';
-import waitForPromises from 'helpers/wait_for_promises';
import httpStatusCodes from '~/lib/utils/http_status';
describe('Code Coverage', () => {
@@ -17,7 +17,7 @@ describe('Code Coverage', () => {
const findAlert = () => wrapper.find(GlAlert);
const findAreaChart = () => wrapper.find(GlAreaChart);
- const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findAllDropdownItems = () => wrapper.findAll(GlDeprecatedDropdownItem);
const findFirstDropdownItem = () => findAllDropdownItems().at(0);
const findSecondDropdownItem = () => findAllDropdownItems().at(1);
@@ -124,7 +124,7 @@ describe('Code Coverage', () => {
});
it('renders the dropdown with all custom names as options', () => {
- expect(wrapper.contains(GlDropdown)).toBeDefined();
+ expect(wrapper.contains(GlDeprecatedDropdown)).toBeDefined();
expect(findAllDropdownItems()).toHaveLength(codeCoverageMockData.length);
expect(findFirstDropdownItem().text()).toBe(codeCoverageMockData[0].group_name);
});
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index 8917251d285..4c73225b54c 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -1,5 +1,5 @@
import $ from 'jquery';
-import GLDropdown from '~/gl_dropdown'; // eslint-disable-line no-unused-vars
+import '~/gl_dropdown';
import TimezoneDropdown, {
formatUtcOffset,
formatTimezone,
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 1f7eec567b8..a50ceed5d09 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -6,6 +6,8 @@ import {
visibilityLevelDescriptions,
visibilityOptions,
} from '~/pages/projects/shared/permissions/constants';
+import projectFeatureSetting from '~/pages/projects/shared/permissions/components/project_feature_setting.vue';
+import projectFeatureToggle from '~/vue_shared/components/toggle_button.vue';
const defaultProps = {
currentSettings: {
@@ -65,7 +67,13 @@ describe('Settings Panel', () => {
return mountComponent({ ...extraProps, currentSettings: currentSettingsProps });
};
- const findLFSSettingsMessage = () => wrapper.find({ ref: 'git-lfs-settings' }).find('p');
+ const findLFSSettingsRow = () => wrapper.find({ ref: 'git-lfs-settings' });
+ const findLFSSettingsMessage = () => findLFSSettingsRow().find('p');
+ const findLFSFeatureToggle = () => findLFSSettingsRow().find(projectFeatureToggle);
+
+ const findRepositoryFeatureProjectRow = () => wrapper.find({ ref: 'repository-settings' });
+ const findRepositoryFeatureSetting = () =>
+ findRepositoryFeatureProjectRow().find(projectFeatureSetting);
beforeEach(() => {
wrapper = mountComponent();
@@ -154,7 +162,7 @@ describe('Settings Panel', () => {
it('should set the repository help text when the visibility level is set to private', () => {
wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PRIVATE });
- expect(wrapper.find({ ref: 'repository-settings' }).props().helpText).toEqual(
+ expect(findRepositoryFeatureProjectRow().props().helpText).toBe(
'View and edit files in this project',
);
});
@@ -162,7 +170,7 @@ describe('Settings Panel', () => {
it('should set the repository help text with a read access warning when the visibility level is set to non-private', () => {
wrapper = overrideCurrentSettings({ visibilityLevel: visibilityOptions.PUBLIC });
- expect(wrapper.find({ ref: 'repository-settings' }).props().helpText).toEqual(
+ expect(findRepositoryFeatureProjectRow().props().helpText).toBe(
'View and edit files in this project. Non-project members will only have read access',
);
});
@@ -176,7 +184,7 @@ describe('Settings Panel', () => {
wrapper
.find('[name="project[project_feature_attributes][merge_requests_access_level]"]')
.props().disabledInput,
- ).toEqual(false);
+ ).toBe(false);
});
it('should disable the merge requests access level input when the repository is disabled', () => {
@@ -186,7 +194,7 @@ describe('Settings Panel', () => {
wrapper
.find('[name="project[project_feature_attributes][merge_requests_access_level]"]')
.props().disabledInput,
- ).toEqual(true);
+ ).toBe(true);
});
});
@@ -197,7 +205,7 @@ describe('Settings Panel', () => {
expect(
wrapper.find('[name="project[project_feature_attributes][forking_access_level]"]').props()
.disabledInput,
- ).toEqual(false);
+ ).toBe(false);
});
it('should disable the forking access level input when the repository is disabled', () => {
@@ -206,7 +214,7 @@ describe('Settings Panel', () => {
expect(
wrapper.find('[name="project[project_feature_attributes][forking_access_level]"]').props()
.disabledInput,
- ).toEqual(true);
+ ).toBe(true);
});
});
@@ -217,7 +225,7 @@ describe('Settings Panel', () => {
expect(
wrapper.find('[name="project[project_feature_attributes][builds_access_level]"]').props()
.disabledInput,
- ).toEqual(false);
+ ).toBe(false);
});
it('should disable the builds access level input when the repository is disabled', () => {
@@ -226,7 +234,7 @@ describe('Settings Panel', () => {
expect(
wrapper.find('[name="project[project_feature_attributes][builds_access_level]"]').props()
.disabledInput,
- ).toEqual(true);
+ ).toBe(true);
});
});
@@ -287,7 +295,7 @@ describe('Settings Panel', () => {
expect(
wrapper.find('[name="project[container_registry_enabled]"]').props().disabledInput,
- ).toEqual(false);
+ ).toBe(false);
});
it('should disable the container registry input when the repository is disabled', () => {
@@ -298,7 +306,7 @@ describe('Settings Panel', () => {
expect(
wrapper.find('[name="project[container_registry_enabled]"]').props().disabledInput,
- ).toEqual(true);
+ ).toBe(true);
});
});
@@ -307,7 +315,7 @@ describe('Settings Panel', () => {
wrapper.setProps({ lfsAvailable: true });
return wrapper.vm.$nextTick(() => {
- expect(wrapper.find({ ref: 'git-lfs-settings' }).exists()).toEqual(true);
+ expect(findLFSSettingsRow().exists()).toBe(true);
});
});
@@ -315,14 +323,12 @@ describe('Settings Panel', () => {
wrapper.setProps({ lfsAvailable: false });
return wrapper.vm.$nextTick(() => {
- expect(wrapper.find({ ref: 'git-lfs-settings' }).exists()).toEqual(false);
+ expect(findLFSSettingsRow().exists()).toBe(false);
});
});
it('should set the LFS settings help path', () => {
- expect(wrapper.find({ ref: 'git-lfs-settings' }).props().helpPath).toBe(
- defaultProps.lfsHelpPath,
- );
+ expect(findLFSSettingsRow().props().helpPath).toBe(defaultProps.lfsHelpPath);
});
it('should enable the LFS input when the repository is enabled', () => {
@@ -331,7 +337,7 @@ describe('Settings Panel', () => {
{ lfsAvailable: true },
);
- expect(wrapper.find('[name="project[lfs_enabled]"]').props().disabledInput).toEqual(false);
+ expect(findLFSFeatureToggle().props().disabledInput).toBe(false);
});
it('should disable the LFS input when the repository is disabled', () => {
@@ -340,7 +346,27 @@ describe('Settings Panel', () => {
{ lfsAvailable: true },
);
- expect(wrapper.find('[name="project[lfs_enabled]"]').props().disabledInput).toEqual(true);
+ expect(findLFSFeatureToggle().props().disabledInput).toBe(true);
+ });
+
+ it('should not change lfsEnabled when disabling the repository', async () => {
+ // mount over shallowMount, because we are aiming to test rendered state of toggle
+ wrapper = mountComponent({ currentSettings: { lfsEnabled: true } }, mount);
+
+ const repositoryFeatureToggleButton = findRepositoryFeatureSetting().find('button');
+ const lfsFeatureToggleButton = findLFSFeatureToggle().find('button');
+ const isToggleButtonChecked = toggleButton => toggleButton.classes('is-checked');
+
+ // assert the initial state
+ expect(isToggleButtonChecked(lfsFeatureToggleButton)).toBe(true);
+ expect(isToggleButtonChecked(repositoryFeatureToggleButton)).toBe(true);
+
+ repositoryFeatureToggleButton.trigger('click');
+ await wrapper.vm.$nextTick();
+
+ expect(isToggleButtonChecked(repositoryFeatureToggleButton)).toBe(false);
+ // LFS toggle should still be checked
+ expect(isToggleButtonChecked(lfsFeatureToggleButton)).toBe(true);
});
describe.each`
@@ -364,14 +390,14 @@ describe('Settings Panel', () => {
expect(message.text()).toContain(
'LFS objects from this repository are still available to forks',
);
- expect(link.text()).toEqual('How do I remove them?');
- expect(link.attributes('href')).toEqual(
+ expect(link.text()).toBe('How do I remove them?');
+ expect(link.attributes('href')).toBe(
'/help/topics/git/lfs/index#removing-objects-from-lfs',
);
});
} else {
it('does not show warning message', () => {
- expect(findLFSSettingsMessage().exists()).toEqual(false);
+ expect(findLFSSettingsMessage().exists()).toBe(false);
});
}
},
@@ -383,7 +409,7 @@ describe('Settings Panel', () => {
wrapper.setProps({ packagesAvailable: true });
return wrapper.vm.$nextTick(() => {
- expect(wrapper.find({ ref: 'package-settings' }).exists()).toEqual(true);
+ expect(wrapper.find({ ref: 'package-settings' }).exists()).toBe(true);
});
});
@@ -391,7 +417,7 @@ describe('Settings Panel', () => {
wrapper.setProps({ packagesAvailable: false });
return wrapper.vm.$nextTick(() => {
- expect(wrapper.find({ ref: 'package-settings' }).exists()).toEqual(false);
+ expect(wrapper.find({ ref: 'package-settings' }).exists()).toBe(false);
});
});
@@ -411,9 +437,7 @@ describe('Settings Panel', () => {
{ packagesAvailable: true },
);
- expect(wrapper.find('[name="project[packages_enabled]"]').props().disabledInput).toEqual(
- false,
- );
+ expect(wrapper.find('[name="project[packages_enabled]"]').props().disabledInput).toBe(false);
});
it('should disable the packages input when the repository is disabled', () => {
@@ -422,9 +446,7 @@ describe('Settings Panel', () => {
{ packagesAvailable: true },
);
- expect(wrapper.find('[name="project[packages_enabled]"]').props().disabledInput).toEqual(
- true,
- );
+ expect(wrapper.find('[name="project[packages_enabled]"]').props().disabledInput).toBe(true);
});
});
@@ -503,7 +525,7 @@ describe('Settings Panel', () => {
});
it('should contain help text', () => {
- expect(wrapper.find({ ref: 'metrics-visibility-settings' }).props().helpText).toEqual(
+ expect(wrapper.find({ ref: 'metrics-visibility-settings' }).props().helpText).toBe(
'With Metrics Dashboard you can visualize this project performance metrics',
);
});
@@ -514,7 +536,7 @@ describe('Settings Panel', () => {
const metricsSettingsRow = wrapper.find({ ref: 'metrics-visibility-settings' });
expect(wrapper.vm.metricsOptionsDropdownEnabled).toBe(true);
- expect(metricsSettingsRow.find('select').attributes('disabled')).toEqual('disabled');
+ expect(metricsSettingsRow.find('select').attributes('disabled')).toBe('disabled');
});
});
});
diff --git a/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js b/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js
index 738498edbd3..589ec0ae047 100644
--- a/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js
+++ b/spec/frontend/pages/sessions/new/signin_tabs_memoizer_spec.js
@@ -1,8 +1,6 @@
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import AccessorUtilities from '~/lib/utils/accessor';
import SigninTabsMemoizer from '~/pages/sessions/new/signin_tabs_memoizer';
-import trackData from '~/pages/sessions/new/index';
-import Tracking from '~/tracking';
-import { useLocalStorageSpy } from 'helpers/local_storage_helper';
useLocalStorageSpy();
@@ -99,50 +97,6 @@ describe('SigninTabsMemoizer', () => {
});
});
- describe('trackData', () => {
- beforeEach(() => {
- jest.spyOn(Tracking, 'event').mockImplementation(() => {});
- });
-
- describe('with tracking data', () => {
- beforeEach(() => {
- gon.tracking_data = {
- category: 'Growth::Acquisition::Experiment::SignUpFlow',
- action: 'start',
- label: 'uuid',
- property: 'control_group',
- };
- trackData();
- });
-
- it('should track data when the "click" event of the register tab is triggered', () => {
- document.querySelector('a[href="#register-pane"]').click();
-
- expect(Tracking.event).toHaveBeenCalledWith(
- 'Growth::Acquisition::Experiment::SignUpFlow',
- 'start',
- {
- label: 'uuid',
- property: 'control_group',
- },
- );
- });
- });
-
- describe('without tracking data', () => {
- beforeEach(() => {
- gon.tracking_data = undefined;
- trackData();
- });
-
- it('should not track data when the "click" event of the register tab is triggered', () => {
- document.querySelector('a[href="#register-pane"]').click();
-
- expect(Tracking.event).not.toHaveBeenCalled();
- });
- });
- });
-
describe('saveData', () => {
beforeEach(() => {
memo = {
diff --git a/spec/frontend/pdf/page_spec.js b/spec/frontend/pdf/page_spec.js
index 4e24b0696ec..f9d94781265 100644
--- a/spec/frontend/pdf/page_spec.js
+++ b/spec/frontend/pdf/page_spec.js
@@ -1,6 +1,6 @@
import Vue from 'vue';
-import PageComponent from '~/pdf/page/index.vue';
import mountComponent from 'helpers/vue_mount_component_helper';
+import PageComponent from '~/pdf/page/index.vue';
jest.mock('pdfjs-dist/webpack', () => {
return { default: jest.requireActual('pdfjs-dist/build/pdf') };
diff --git a/spec/frontend/performance_bar/components/detailed_metric_spec.js b/spec/frontend/performance_bar/components/detailed_metric_spec.js
index f040dcfdea4..b9dc4c9588c 100644
--- a/spec/frontend/performance_bar/components/detailed_metric_spec.js
+++ b/spec/frontend/performance_bar/components/detailed_metric_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
import DetailedMetric from '~/performance_bar/components/detailed_metric.vue';
import RequestWarning from '~/performance_bar/components/request_warning.vue';
-import { trimText } from 'helpers/text_helper';
describe('detailedMetric', () => {
let wrapper;
diff --git a/spec/frontend/persistent_user_callout_spec.js b/spec/frontend/persistent_user_callout_spec.js
index 97985ba3a07..578fd8d836a 100644
--- a/spec/frontend/persistent_user_callout_spec.js
+++ b/spec/frontend/persistent_user_callout_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import PersistentUserCallout from '~/persistent_user_callout';
-import Flash from '~/flash';
+import { deprecatedCreateFlash as Flash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
new file mode 100644
index 00000000000..d1e6b6b938a
--- /dev/null
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -0,0 +1,108 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import { GlNewDropdown, GlNewDropdownItem, GlForm } from '@gitlab/ui';
+import Api from '~/api';
+import PipelineNewForm from '~/pipeline_new/components/pipeline_new_form.vue';
+import { mockRefs, mockParams, mockPostParams, mockProjectId } from '../mock_data';
+
+describe('Pipeline New Form', () => {
+ let wrapper;
+
+ const dummySubmitEvent = {
+ preventDefault() {},
+ };
+
+ const findForm = () => wrapper.find(GlForm);
+ const findDropdown = () => wrapper.find(GlNewDropdown);
+ const findDropdownItems = () => wrapper.findAll(GlNewDropdownItem);
+ const findVariableRows = () => wrapper.findAll('[data-testid="ci-variable-row"]');
+ const findRemoveIcons = () => wrapper.findAll('[data-testid="remove-ci-variable-row"]');
+ const findKeyInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-key"]');
+
+ const createComponent = (term = '', props = {}, method = shallowMount) => {
+ wrapper = method(PipelineNewForm, {
+ propsData: {
+ projectId: mockProjectId,
+ pipelinesPath: '',
+ refs: mockRefs,
+ defaultBranch: 'master',
+ settingsLink: '',
+ ...props,
+ },
+ data() {
+ return {
+ searchTerm: term,
+ };
+ },
+ });
+ };
+
+ beforeEach(() => {
+ jest.spyOn(Api, 'createPipeline').mockResolvedValue({ data: { web_url: '/' } });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('Dropdown with branches and tags', () => {
+ it('displays dropdown with all branches and tags', () => {
+ createComponent();
+ expect(findDropdownItems().length).toBe(mockRefs.length);
+ });
+
+ it('when user enters search term the list is filtered', () => {
+ createComponent('master');
+
+ expect(findDropdownItems().length).toBe(1);
+ expect(
+ findDropdownItems()
+ .at(0)
+ .text(),
+ ).toBe('master');
+ });
+ });
+
+ describe('Form', () => {
+ beforeEach(() => {
+ createComponent('', mockParams, mount);
+ });
+ it('displays the correct values for the provided query params', () => {
+ expect(findDropdown().props('text')).toBe('tag-1');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findVariableRows().length).toBe(3);
+ });
+ });
+
+ it('does not display remove icon for last row', () => {
+ expect(findRemoveIcons().length).toBe(2);
+ });
+
+ it('removes ci variable row on remove icon button click', () => {
+ findRemoveIcons()
+ .at(1)
+ .trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findVariableRows().length).toBe(2);
+ });
+ });
+
+ it('creates a pipeline on submit', () => {
+ findForm().vm.$emit('submit', dummySubmitEvent);
+
+ expect(Api.createPipeline).toHaveBeenCalledWith(mockProjectId, mockPostParams);
+ });
+
+ it('creates blank variable on input change event', () => {
+ findKeyInputs()
+ .at(2)
+ .trigger('change');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findVariableRows().length).toBe(4);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_new/mock_data.js b/spec/frontend/pipeline_new/mock_data.js
new file mode 100644
index 00000000000..55ec1fb5afc
--- /dev/null
+++ b/spec/frontend/pipeline_new/mock_data.js
@@ -0,0 +1,21 @@
+export const mockRefs = ['master', 'branch-1', 'tag-1'];
+
+export const mockParams = {
+ refParam: 'tag-1',
+ variableParams: {
+ test_var: 'test_var_val',
+ },
+ fileParams: {
+ test_file: 'test_file_val',
+ },
+};
+
+export const mockProjectId = '21';
+
+export const mockPostParams = {
+ ref: 'tag-1',
+ variables: [
+ { key: 'test_var', value: 'test_var_val', variable_type: 'env_var' },
+ { key: 'test_file', value: 'test_file_val', variable_type: 'file' },
+ ],
+};
diff --git a/spec/frontend/pipelines/components/dag/dag_spec.js b/spec/frontend/pipelines/components/dag/dag_spec.js
index 7dea6d819b9..989f6c17197 100644
--- a/spec/frontend/pipelines/components/dag/dag_spec.js
+++ b/spec/frontend/pipelines/components/dag/dag_spec.js
@@ -1,7 +1,4 @@
import { mount, shallowMount } from '@vue/test-utils';
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
-import waitForPromises from 'helpers/wait_for_promises';
import { GlAlert, GlEmptyState } from '@gitlab/ui';
import Dag from '~/pipelines/components/dag/dag.vue';
import DagGraph from '~/pipelines/components/dag/dag_graph.vue';
@@ -11,13 +8,11 @@ import {
ADD_NOTE,
REMOVE_NOTE,
REPLACE_NOTES,
- DEFAULT,
PARSE_FAILURE,
- LOAD_FAILURE,
UNSUPPORTED_DATA,
} from '~/pipelines/components/dag/constants';
import {
- mockBaseData,
+ mockParsedGraphQLNodes,
tooSmallGraph,
unparseableGraph,
graphWithoutDependencies,
@@ -27,7 +22,6 @@ import {
describe('Pipeline DAG graph wrapper', () => {
let wrapper;
- let mock;
const getAlert = () => wrapper.find(GlAlert);
const getAllAlerts = () => wrapper.findAll(GlAlert);
const getGraph = () => wrapper.find(DagGraph);
@@ -35,45 +29,46 @@ describe('Pipeline DAG graph wrapper', () => {
const getErrorText = type => wrapper.vm.$options.errorTexts[type];
const getEmptyState = () => wrapper.find(GlEmptyState);
- const dataPath = '/root/test/pipelines/90/dag.json';
-
- const createComponent = (propsData = {}, method = shallowMount) => {
+ const createComponent = ({
+ graphData = mockParsedGraphQLNodes,
+ provideOverride = {},
+ method = shallowMount,
+ } = {}) => {
if (wrapper?.destroy) {
wrapper.destroy();
}
wrapper = method(Dag, {
- propsData: {
+ provide: {
+ pipelineProjectPath: 'root/abc-dag',
+ pipelineIid: '1',
emptySvgPath: '/my-svg',
dagDocPath: '/my-doc',
- ...propsData,
+ ...provideOverride,
},
data() {
return {
+ graphData,
showFailureAlert: false,
};
},
});
};
- beforeEach(() => {
- mock = new MockAdapter(axios);
- });
-
afterEach(() => {
- mock.restore();
wrapper.destroy();
wrapper = null;
});
- describe('when there is no dataUrl', () => {
+ describe('when a query argument is undefined', () => {
beforeEach(() => {
- createComponent({ graphUrl: undefined });
+ createComponent({
+ provideOverride: { pipelineProjectPath: undefined },
+ graphData: null,
+ });
});
- it('shows the DEFAULT alert and not the graph', () => {
- expect(getAlert().exists()).toBe(true);
- expect(getAlert().text()).toBe(getErrorText(DEFAULT));
+ it('does not render the graph', async () => {
expect(getGraph().exists()).toBe(false);
});
@@ -82,36 +77,12 @@ describe('Pipeline DAG graph wrapper', () => {
});
});
- describe('when there is a dataUrl', () => {
- describe('but the data fetch fails', () => {
+ describe('when all query variables are defined', () => {
+ describe('but the parse fails', () => {
beforeEach(async () => {
- mock.onGet(dataPath).replyOnce(500);
- createComponent({ graphUrl: dataPath });
-
- await wrapper.vm.$nextTick();
-
- return waitForPromises();
- });
-
- it('shows the LOAD_FAILURE alert and not the graph', () => {
- expect(getAlert().exists()).toBe(true);
- expect(getAlert().text()).toBe(getErrorText(LOAD_FAILURE));
- expect(getGraph().exists()).toBe(false);
- });
-
- it('does not render the empty state', () => {
- expect(getEmptyState().exists()).toBe(false);
- });
- });
-
- describe('the data fetch succeeds but the parse fails', () => {
- beforeEach(async () => {
- mock.onGet(dataPath).replyOnce(200, unparseableGraph);
- createComponent({ graphUrl: dataPath });
-
- await wrapper.vm.$nextTick();
-
- return waitForPromises();
+ createComponent({
+ graphData: unparseableGraph,
+ });
});
it('shows the PARSE_FAILURE alert and not the graph', () => {
@@ -125,19 +96,12 @@ describe('Pipeline DAG graph wrapper', () => {
});
});
- describe('and the data fetch and parse succeeds', () => {
+ describe('parse succeeds', () => {
beforeEach(async () => {
- mock.onGet(dataPath).replyOnce(200, mockBaseData);
- createComponent({ graphUrl: dataPath }, mount);
-
- await wrapper.vm.$nextTick();
-
- return waitForPromises();
+ createComponent({ method: mount });
});
- it('shows the graph and the beta alert', () => {
- expect(getAllAlerts().length).toBe(1);
- expect(getAlert().text()).toContain('This feature is currently in beta.');
+ it('shows the graph', () => {
expect(getGraph().exists()).toBe(true);
});
@@ -146,14 +110,11 @@ describe('Pipeline DAG graph wrapper', () => {
});
});
- describe('the data fetch and parse succeeds, but the resulting graph is too small', () => {
+ describe('parse succeeds, but the resulting graph is too small', () => {
beforeEach(async () => {
- mock.onGet(dataPath).replyOnce(200, tooSmallGraph);
- createComponent({ graphUrl: dataPath });
-
- await wrapper.vm.$nextTick();
-
- return waitForPromises();
+ createComponent({
+ graphData: tooSmallGraph,
+ });
});
it('shows the UNSUPPORTED_DATA alert and not the graph', () => {
@@ -167,19 +128,16 @@ describe('Pipeline DAG graph wrapper', () => {
});
});
- describe('the data fetch succeeds but the returned data is empty', () => {
+ describe('the returned data is empty', () => {
beforeEach(async () => {
- mock.onGet(dataPath).replyOnce(200, graphWithoutDependencies);
- createComponent({ graphUrl: dataPath }, mount);
-
- await wrapper.vm.$nextTick();
-
- return waitForPromises();
+ createComponent({
+ method: mount,
+ graphData: graphWithoutDependencies,
+ });
});
it('does not render an error alert or the graph', () => {
- expect(getAllAlerts().length).toBe(1);
- expect(getAlert().text()).toContain('This feature is currently in beta.');
+ expect(getAllAlerts().length).toBe(0);
expect(getGraph().exists()).toBe(false);
});
@@ -191,12 +149,7 @@ describe('Pipeline DAG graph wrapper', () => {
describe('annotations', () => {
beforeEach(async () => {
- mock.onGet(dataPath).replyOnce(200, mockBaseData);
- createComponent({ graphUrl: dataPath }, mount);
-
- await wrapper.vm.$nextTick();
-
- return waitForPromises();
+ createComponent();
});
it('toggles on link mouseover and mouseout', async () => {
diff --git a/spec/frontend/pipelines/components/dag/drawing_utils_spec.js b/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
index a50163411ed..37a7d07485b 100644
--- a/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
+++ b/spec/frontend/pipelines/components/dag/drawing_utils_spec.js
@@ -1,9 +1,9 @@
import { createSankey } from '~/pipelines/components/dag/drawing_utils';
import { parseData } from '~/pipelines/components/dag/parsing_utils';
-import { mockBaseData } from './mock_data';
+import { mockParsedGraphQLNodes } from './mock_data';
describe('DAG visualization drawing utilities', () => {
- const parsed = parseData(mockBaseData.stages);
+ const parsed = parseData(mockParsedGraphQLNodes);
const layoutSettings = {
width: 200,
diff --git a/spec/frontend/pipelines/components/dag/mock_data.js b/spec/frontend/pipelines/components/dag/mock_data.js
index 3b39b9cd21c..e7e93804195 100644
--- a/spec/frontend/pipelines/components/dag/mock_data.js
+++ b/spec/frontend/pipelines/components/dag/mock_data.js
@@ -1,127 +1,56 @@
-/*
- It is important that the simple base include parallel jobs
- as well as non-parallel jobs with spaces in the name to prevent
- us relying on spaces as an indicator.
-*/
-export const mockBaseData = {
- stages: [
- {
- name: 'test',
- groups: [
- {
- name: 'jest',
- size: 2,
- jobs: [{ name: 'jest 1/2', needs: ['frontend fixtures'] }, { name: 'jest 2/2' }],
- },
- {
- name: 'rspec',
- size: 1,
- jobs: [{ name: 'rspec', needs: ['frontend fixtures'] }],
- },
- ],
- },
- {
- name: 'fixtures',
- groups: [
- {
- name: 'frontend fixtures',
- size: 1,
- jobs: [{ name: 'frontend fixtures' }],
- },
- ],
- },
- {
- name: 'un-needed',
- groups: [
- {
- name: 'un-needed',
- size: 1,
- jobs: [{ name: 'un-needed' }],
- },
- ],
- },
- ],
-};
-
-export const tooSmallGraph = {
- stages: [
- {
- name: 'test',
- groups: [
- {
- name: 'jest',
- size: 2,
- jobs: [{ name: 'jest 1/2' }, { name: 'jest 2/2' }],
- },
- {
- name: 'rspec',
- size: 1,
- jobs: [{ name: 'rspec', needs: ['frontend fixtures'] }],
- },
- ],
- },
- {
- name: 'fixtures',
- groups: [
- {
- name: 'frontend fixtures',
- size: 1,
- jobs: [{ name: 'frontend fixtures' }],
- },
- ],
- },
- {
- name: 'un-needed',
- groups: [
- {
- name: 'un-needed',
- size: 1,
- jobs: [{ name: 'un-needed' }],
- },
- ],
- },
- ],
-};
+export const tooSmallGraph = [
+ {
+ category: 'test',
+ name: 'jest',
+ size: 2,
+ jobs: [{ name: 'jest 1/2' }, { name: 'jest 2/2' }],
+ },
+ {
+ category: 'test',
+ name: 'rspec',
+ size: 1,
+ jobs: [{ name: 'rspec', needs: ['frontend fixtures'] }],
+ },
+ {
+ category: 'fixtures',
+ name: 'frontend fixtures',
+ size: 1,
+ jobs: [{ name: 'frontend fixtures' }],
+ },
+ {
+ category: 'un-needed',
+ name: 'un-needed',
+ size: 1,
+ jobs: [{ name: 'un-needed' }],
+ },
+];
-export const graphWithoutDependencies = {
- stages: [
- {
- name: 'test',
- groups: [
- {
- name: 'jest',
- size: 2,
- jobs: [{ name: 'jest 1/2' }, { name: 'jest 2/2' }],
- },
- {
- name: 'rspec',
- size: 1,
- jobs: [{ name: 'rspec' }],
- },
- ],
- },
- {
- name: 'fixtures',
- groups: [
- {
- name: 'frontend fixtures',
- size: 1,
- jobs: [{ name: 'frontend fixtures' }],
- },
- ],
- },
- {
- name: 'un-needed',
- groups: [
- {
- name: 'un-needed',
- size: 1,
- jobs: [{ name: 'un-needed' }],
- },
- ],
- },
- ],
-};
+export const graphWithoutDependencies = [
+ {
+ category: 'test',
+ name: 'jest',
+ size: 2,
+ jobs: [{ name: 'jest 1/2' }, { name: 'jest 2/2' }],
+ },
+ {
+ category: 'test',
+ name: 'rspec',
+ size: 1,
+ jobs: [{ name: 'rspec' }],
+ },
+ {
+ category: 'fixtures',
+ name: 'frontend fixtures',
+ size: 1,
+ jobs: [{ name: 'frontend fixtures' }],
+ },
+ {
+ category: 'un-needed',
+ name: 'un-needed',
+ size: 1,
+ jobs: [{ name: 'un-needed' }],
+ },
+];
export const unparseableGraph = [
{
@@ -468,3 +397,264 @@ export const multiNote = {
},
},
};
+
+/*
+ It is important that the base include parallel jobs
+ as well as non-parallel jobs with spaces in the name to prevent
+ us relying on spaces as an indicator.
+*/
+
+export const mockParsedGraphQLNodes = [
+ {
+ category: 'build',
+ name: 'build_a',
+ size: 1,
+ jobs: [
+ {
+ name: 'build_a',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'build',
+ name: 'build_b',
+ size: 1,
+ jobs: [
+ {
+ name: 'build_b',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'test',
+ name: 'test_a',
+ size: 1,
+ jobs: [
+ {
+ name: 'test_a',
+ needs: ['build_a'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'test',
+ name: 'test_b',
+ size: 1,
+ jobs: [
+ {
+ name: 'test_b',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'test',
+ name: 'test_c',
+ size: 1,
+ jobs: [
+ {
+ name: 'test_c',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'test',
+ name: 'test_d',
+ size: 1,
+ jobs: [
+ {
+ name: 'test_d',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'post-test',
+ name: 'post_test_a',
+ size: 1,
+ jobs: [
+ {
+ name: 'post_test_a',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'post-test',
+ name: 'post_test_b',
+ size: 1,
+ jobs: [
+ {
+ name: 'post_test_b',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'post-test',
+ name: 'post_test_c',
+ size: 1,
+ jobs: [
+ {
+ name: 'post_test_c',
+ needs: ['test_b', 'test_a'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'staging',
+ name: 'staging_a',
+ size: 1,
+ jobs: [
+ {
+ name: 'staging_a',
+ needs: ['post_test_a'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'staging',
+ name: 'staging_b',
+ size: 1,
+ jobs: [
+ {
+ name: 'staging_b',
+ needs: ['post_test_b'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'staging',
+ name: 'staging_c',
+ size: 1,
+ jobs: [
+ {
+ name: 'staging_c',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'staging',
+ name: 'staging_d',
+ size: 1,
+ jobs: [
+ {
+ name: 'staging_d',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'staging',
+ name: 'staging_e',
+ size: 1,
+ jobs: [
+ {
+ name: 'staging_e',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'canary',
+ name: 'canary_a',
+ size: 1,
+ jobs: [
+ {
+ name: 'canary_a',
+ needs: ['staging_b', 'staging_a'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'canary',
+ name: 'canary_b',
+ size: 1,
+ jobs: [
+ {
+ name: 'canary_b',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'canary',
+ name: 'canary_c',
+ size: 1,
+ jobs: [
+ {
+ name: 'canary_c',
+ needs: ['staging_b'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'production',
+ name: 'production_a',
+ size: 1,
+ jobs: [
+ {
+ name: 'production_a',
+ needs: ['canary_a'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'production',
+ name: 'production_b',
+ size: 1,
+ jobs: [
+ {
+ name: 'production_b',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'production',
+ name: 'production_c',
+ size: 1,
+ jobs: [
+ {
+ name: 'production_c',
+ needs: [],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+ {
+ category: 'production',
+ name: 'production_d',
+ size: 1,
+ jobs: [
+ {
+ name: 'production_d',
+ needs: ['canary_c'],
+ },
+ ],
+ __typename: 'CiGroup',
+ },
+];
diff --git a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js b/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
index d9a1296e572..e93fa8e6760 100644
--- a/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
+++ b/spec/frontend/pipelines/components/dag/parsing_utils_spec.js
@@ -1,5 +1,5 @@
import {
- createNodesStructure,
+ createNodeDict,
makeLinksFromNodes,
filterByAncestors,
parseData,
@@ -8,56 +8,17 @@ import {
} from '~/pipelines/components/dag/parsing_utils';
import { createSankey } from '~/pipelines/components/dag/drawing_utils';
-import { mockBaseData } from './mock_data';
+import { mockParsedGraphQLNodes } from './mock_data';
describe('DAG visualization parsing utilities', () => {
- const { nodes, nodeDict } = createNodesStructure(mockBaseData.stages);
- const unfilteredLinks = makeLinksFromNodes(nodes, nodeDict);
- const parsed = parseData(mockBaseData.stages);
-
- const layoutSettings = {
- width: 200,
- height: 200,
- nodeWidth: 10,
- nodePadding: 20,
- paddingForLabels: 100,
- };
-
- const sankeyLayout = createSankey(layoutSettings)(parsed);
-
- describe('createNodesStructure', () => {
- const parallelGroupName = 'jest';
- const parallelJobName = 'jest 1/2';
- const singleJobName = 'frontend fixtures';
-
- const { name, jobs, size } = mockBaseData.stages[0].groups[0];
-
- it('returns the expected node structure', () => {
- expect(nodes[0]).toHaveProperty('category', mockBaseData.stages[0].name);
- expect(nodes[0]).toHaveProperty('name', name);
- expect(nodes[0]).toHaveProperty('jobs', jobs);
- expect(nodes[0]).toHaveProperty('size', size);
- });
-
- it('adds needs to top level of nodeDict entries', () => {
- expect(nodeDict[parallelGroupName]).toHaveProperty('needs');
- expect(nodeDict[parallelJobName]).toHaveProperty('needs');
- expect(nodeDict[singleJobName]).toHaveProperty('needs');
- });
-
- it('makes entries in nodeDict for jobs and parallel jobs', () => {
- const nodeNames = Object.keys(nodeDict);
-
- expect(nodeNames.includes(parallelGroupName)).toBe(true);
- expect(nodeNames.includes(parallelJobName)).toBe(true);
- expect(nodeNames.includes(singleJobName)).toBe(true);
- });
- });
+ const nodeDict = createNodeDict(mockParsedGraphQLNodes);
+ const unfilteredLinks = makeLinksFromNodes(mockParsedGraphQLNodes, nodeDict);
+ const parsed = parseData(mockParsedGraphQLNodes);
describe('makeLinksFromNodes', () => {
it('returns the expected link structure', () => {
- expect(unfilteredLinks[0]).toHaveProperty('source', 'frontend fixtures');
- expect(unfilteredLinks[0]).toHaveProperty('target', 'jest');
+ expect(unfilteredLinks[0]).toHaveProperty('source', 'build_a');
+ expect(unfilteredLinks[0]).toHaveProperty('target', 'test_a');
expect(unfilteredLinks[0]).toHaveProperty('value', 10);
});
});
@@ -107,8 +68,22 @@ describe('DAG visualization parsing utilities', () => {
describe('removeOrphanNodes', () => {
it('removes sankey nodes that have no needs and are not needed', () => {
+ const layoutSettings = {
+ width: 200,
+ height: 200,
+ nodeWidth: 10,
+ nodePadding: 20,
+ paddingForLabels: 100,
+ };
+
+ const sankeyLayout = createSankey(layoutSettings)(parsed);
const cleanedNodes = removeOrphanNodes(sankeyLayout.nodes);
- expect(cleanedNodes).toHaveLength(sankeyLayout.nodes.length - 1);
+ /*
+ These lengths are determined by the mock data.
+ If the data changes, the numbers may also change.
+ */
+ expect(parsed.nodes).toHaveLength(21);
+ expect(cleanedNodes).toHaveLength(12);
});
});
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index add7b56845e..c5b7318d3af 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -1,10 +1,10 @@
-import Api from '~/api';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import { GlFilteredSearch } from '@gitlab/ui';
+import Api from '~/api';
import axios from '~/lib/utils/axios_utils';
import PipelinesFilteredSearch from '~/pipelines/components/pipelines_list/pipelines_filtered_search.vue';
import { users, mockSearch, branches, tags } from '../mock_data';
-import { GlFilteredSearch } from '@gitlab/ui';
describe('Pipelines filtered search', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
index 3c5938cfa1f..ab477292bc1 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -1,4 +1,5 @@
import { mount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
@@ -7,7 +8,7 @@ import ActionComponent from '~/pipelines/components/graph/action_component.vue';
describe('pipeline graph action component', () => {
let wrapper;
let mock;
- const findButton = () => wrapper.find('button');
+ const findButton = () => wrapper.find(GlButton);
beforeEach(() => {
mock = new MockAdapter(axios);
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index 9731ce3f8a6..1389649abea 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import { mount } from '@vue/test-utils';
+import { setHTMLFixture } from 'helpers/fixtures';
import PipelineStore from '~/pipelines/stores/pipeline_store';
import graphComponent from '~/pipelines/components/graph/graph_component.vue';
import stageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
@@ -7,7 +8,6 @@ import linkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines
import graphJSON from './mock_data';
import linkedPipelineJSON from './linked_pipelines_mock_data';
import PipelinesMediator from '~/pipelines/pipeline_details_mediator';
-import { setHTMLFixture } from 'helpers/fixtures';
describe('graph component', () => {
const store = new PipelineStore();
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index 133d5695afb..59121c54ff3 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -1,4 +1,5 @@
import { mount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
import LinkedPipelineComponent from '~/pipelines/components/graph/linked_pipeline.vue';
import CiStatus from '~/vue_shared/components/ci_icon.vue';
@@ -12,7 +13,7 @@ const invalidTriggeredPipelineId = mockPipeline.project.id + 5;
describe('Linked pipeline', () => {
let wrapper;
- const findButton = () => wrapper.find('button');
+ const findButton = () => wrapper.find(GlButton);
const findPipelineLabel = () => wrapper.find('[data-testid="downstream-pipeline-label"]');
const findLinkedPipeline = () => wrapper.find({ ref: 'linkedPipeline' });
@@ -42,9 +43,7 @@ describe('Linked pipeline', () => {
});
it('should render a button', () => {
- const linkElement = wrapper.find('.js-linked-pipeline-content');
-
- expect(linkElement.exists()).toBe(true);
+ expect(findButton().exists()).toBe(true);
});
it('should render the project name', () => {
@@ -62,7 +61,7 @@ describe('Linked pipeline', () => {
});
it('should have a ci-status child component', () => {
- expect(wrapper.find('.js-linked-pipeline-status').exists()).toBe(true);
+ expect(wrapper.find(CiStatus).exists()).toBe(true);
});
it('should render the pipeline id', () => {
@@ -77,15 +76,14 @@ describe('Linked pipeline', () => {
});
it('should render the tooltip text as the title attribute', () => {
- const tooltipRef = wrapper.find('.js-linked-pipeline-content');
- const titleAttr = tooltipRef.attributes('title');
+ const titleAttr = findButton().attributes('title');
expect(titleAttr).toContain(mockPipeline.project.name);
expect(titleAttr).toContain(mockPipeline.details.status.label);
});
- it('does not render the loading icon when isLoading is false', () => {
- expect(wrapper.find('.js-linked-pipeline-loading').exists()).toBe(false);
+ it('sets the loading prop to false', () => {
+ expect(findButton().props('loading')).toBe(false);
});
it('should display multi-project label when pipeline project id is not the same as triggered pipeline project id', () => {
@@ -132,8 +130,8 @@ describe('Linked pipeline', () => {
createWrapper(props);
});
- it('renders a loading icon', () => {
- expect(wrapper.find('.js-linked-pipeline-loading').exists()).toBe(true);
+ it('sets the loading prop to true', () => {
+ expect(findButton().props('loading')).toBe(true);
});
});
diff --git a/spec/frontend/pipelines/header_component_spec.js b/spec/frontend/pipelines/header_component_spec.js
index 1c3a6c545a0..5388d624d3c 100644
--- a/spec/frontend/pipelines/header_component_spec.js
+++ b/spec/frontend/pipelines/header_component_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
import HeaderComponent from '~/pipelines/components/header_component.vue';
import CiHeader from '~/vue_shared/components/header_ci_component.vue';
import eventHub from '~/pipelines/event_hub';
-import { GlModal } from '@gitlab/ui';
describe('Pipeline details header', () => {
let wrapper;
@@ -85,13 +85,13 @@ describe('Pipeline details header', () => {
});
it('should call postAction when retry button action is clicked', () => {
- wrapper.find('.js-retry-button').vm.$emit('click');
+ wrapper.find('[data-testid="retryButton"]').vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'retry');
});
it('should call postAction when cancel button action is clicked', () => {
- wrapper.find('.js-btn-cancel-pipeline').vm.$emit('click');
+ wrapper.find('[data-testid="cancelPipeline"]').vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('headerPostAction', 'cancel');
});
diff --git a/spec/frontend/pipelines/pipeline_details_mediator_spec.js b/spec/frontend/pipelines/pipeline_details_mediator_spec.js
index 083e97666ed..d6699a43b54 100644
--- a/spec/frontend/pipelines/pipeline_details_mediator_spec.js
+++ b/spec/frontend/pipelines/pipeline_details_mediator_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import PipelineMediator from '~/pipelines/pipeline_details_mediator';
-import waitForPromises from 'helpers/wait_for_promises';
describe('PipelineMediator', () => {
let mediator;
diff --git a/spec/frontend/pipelines/pipelines_actions_spec.js b/spec/frontend/pipelines/pipelines_actions_spec.js
index aef54d94974..cce4c2dfa7b 100644
--- a/spec/frontend/pipelines/pipelines_actions_spec.js
+++ b/spec/frontend/pipelines/pipelines_actions_spec.js
@@ -1,11 +1,11 @@
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'spec/test_constants';
+import { GlDeprecatedButton } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import PipelinesActions from '~/pipelines/components/pipelines_list/pipelines_actions.vue';
-import { GlDeprecatedButton } from '@gitlab/ui';
import GlCountdown from '~/vue_shared/components/gl_countdown.vue';
-import waitForPromises from 'helpers/wait_for_promises';
describe('Pipelines Actions dropdown', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/pipelines_artifacts_spec.js b/spec/frontend/pipelines/pipelines_artifacts_spec.js
index 512205c3fc3..83f6cb68eba 100644
--- a/spec/frontend/pipelines/pipelines_artifacts_spec.js
+++ b/spec/frontend/pipelines/pipelines_artifacts_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import PipelineArtifacts from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
import { GlLink } from '@gitlab/ui';
+import PipelineArtifacts from '~/pipelines/components/pipelines_list/pipelines_artifacts.vue';
describe('Pipelines Artifacts dropdown', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index 66446b9aa1d..b0ad6bbd228 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,16 +1,16 @@
-import Api from '~/api';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
import waitForPromises from 'helpers/wait_for_promises';
+import { GlFilteredSearch } from '@gitlab/ui';
+import Api from '~/api';
+import axios from '~/lib/utils/axios_utils';
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
import Store from '~/pipelines/stores/pipelines_store';
import { pipelineWithStages, stageReply, users, mockSearch, branches } from './mock_data';
import { RAW_TEXT_WARNING } from '~/pipelines/constants';
-import { GlFilteredSearch } from '@gitlab/ui';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
-jest.mock('~/flash', () => jest.fn());
+jest.mock('~/flash');
describe('Pipelines', () => {
const jsonFixtureName = 'pipelines/pipelines.json';
diff --git a/spec/frontend/pipelines/stage_spec.js b/spec/frontend/pipelines/stage_spec.js
index 547f8994ca5..e134b81856b 100644
--- a/spec/frontend/pipelines/stage_spec.js
+++ b/spec/frontend/pipelines/stage_spec.js
@@ -1,10 +1,10 @@
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import StageComponent from '~/pipelines/components/pipelines_list/stage.vue';
import eventHub from '~/pipelines/event_hub';
import { stageReply } from './mock_data';
-import waitForPromises from 'helpers/wait_for_promises';
describe('Pipelines stage component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/test_reports/stores/actions_spec.js b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
index d4647c55a53..1809f15a6e6 100644
--- a/spec/frontend/pipelines/test_reports/stores/actions_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/actions_spec.js
@@ -5,7 +5,7 @@ import * as actions from '~/pipelines/stores/test_reports/actions';
import * as types from '~/pipelines/stores/test_reports/mutation_types';
import { TEST_HOST } from '../../../helpers/test_constants';
import testAction from '../../../helpers/vuex_action_helper';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash.js');
@@ -16,14 +16,13 @@ describe('Actions TestReports Store', () => {
const testReports = getJSONFixture('pipelines/test_report.json');
const summary = { total_count: 1 };
- const fullReportEndpoint = `${TEST_HOST}/test_reports.json`;
+ const suiteEndpoint = `${TEST_HOST}/tests/:suite_name.json`;
const summaryEndpoint = `${TEST_HOST}/test_reports/summary.json`;
const defaultState = {
- fullReportEndpoint,
+ suiteEndpoint,
summaryEndpoint,
testReports: {},
selectedSuite: null,
- useBuildSummaryReport: false,
};
beforeEach(() => {
@@ -40,89 +39,63 @@ describe('Actions TestReports Store', () => {
mock.onGet(summaryEndpoint).replyOnce(200, summary, {});
});
- describe('when useBuildSummaryReport in state is true', () => {
- it('sets testReports and shows tests', done => {
- testAction(
- actions.fetchSummary,
- null,
- { ...state, useBuildSummaryReport: true },
- [{ type: types.SET_SUMMARY, payload: summary }],
- [{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
- done,
- );
- });
-
- it('should create flash on API error', done => {
- testAction(
- actions.fetchSummary,
- null,
- {
- summaryEndpoint: null,
- useBuildSummaryReport: true,
- },
- [],
- [{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
- () => {
- expect(createFlash).toHaveBeenCalled();
- done();
- },
- );
- });
+ it('sets testReports and shows tests', done => {
+ testAction(
+ actions.fetchSummary,
+ null,
+ state,
+ [{ type: types.SET_SUMMARY, payload: summary }],
+ [{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
+ done,
+ );
});
- describe('when useBuildSummaryReport in state is false', () => {
- it('sets testReports and shows tests', done => {
- testAction(
- actions.fetchSummary,
- null,
- state,
- [{ type: types.SET_SUMMARY, payload: summary }],
- [],
- done,
- );
- });
-
- it('should create flash on API error', done => {
- testAction(
- actions.fetchSummary,
- null,
- {
- summaryEndpoint: null,
- },
- [],
- [],
- () => {
- expect(createFlash).toHaveBeenCalled();
- done();
- },
- );
- });
+ it('should create flash on API error', done => {
+ testAction(
+ actions.fetchSummary,
+ null,
+ { summaryEndpoint: null },
+ [],
+ [{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
+ () => {
+ expect(createFlash).toHaveBeenCalled();
+ done();
+ },
+ );
});
});
- describe('fetch full report', () => {
+ describe('fetch test suite', () => {
beforeEach(() => {
- mock.onGet(fullReportEndpoint).replyOnce(200, testReports, {});
+ const buildIds = [1];
+ testReports.test_suites[0].build_ids = buildIds;
+ const endpoint = suiteEndpoint.replace(':suite_name', testReports.test_suites[0].name);
+ mock
+ .onGet(endpoint, { params: { build_ids: buildIds } })
+ .replyOnce(200, testReports.test_suites[0], {});
});
- it('sets testReports and shows tests', done => {
+ it('sets test suite and shows tests', done => {
+ const suite = testReports.test_suites[0];
+ const index = 0;
+
testAction(
- actions.fetchFullReport,
- null,
- state,
- [{ type: types.SET_REPORTS, payload: testReports }],
+ actions.fetchTestSuite,
+ index,
+ { ...state, testReports },
+ [{ type: types.SET_SUITE, payload: { suite, index } }],
[{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
done,
);
});
it('should create flash on API error', done => {
+ const index = 0;
+
testAction(
- actions.fetchFullReport,
- null,
- {
- fullReportEndpoint: null,
- },
+ actions.fetchTestSuite,
+ index,
+ { ...state, testReports, suiteEndpoint: null },
[],
[{ type: 'toggleLoading' }, { type: 'toggleLoading' }],
() => {
@@ -131,6 +104,15 @@ describe('Actions TestReports Store', () => {
},
);
});
+
+ describe('when we already have the suite data', () => {
+ it('should not fetch suite', done => {
+ const index = 0;
+ testReports.test_suites[0].hasFullSuite = true;
+
+ testAction(actions.fetchTestSuite, index, { ...state, testReports }, [], [], done);
+ });
+ });
});
describe('set selected suite index', () => {
diff --git a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
index f4cc5c4bc5d..b935029bc6a 100644
--- a/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
+++ b/spec/frontend/pipelines/test_reports/stores/mutations_spec.js
@@ -12,20 +12,24 @@ describe('Mutations TestReports Store', () => {
testReports: {},
selectedSuite: null,
isLoading: false,
- hasFullReport: false,
};
beforeEach(() => {
mockState = { ...defaultState };
});
- describe('set reports', () => {
- it('should set testReports', () => {
- const expectedState = { ...mockState, testReports };
- mutations[types.SET_REPORTS](mockState, testReports);
+ describe('set suite', () => {
+ it('should set the suite at the given index', () => {
+ mockState.testReports = testReports;
+ const suite = { name: 'test_suite' };
+ const index = 0;
+ const expectedState = { ...mockState };
+ expectedState.testReports.test_suites[index] = { suite, hasFullSuite: true };
+ mutations[types.SET_SUITE](mockState, { suite, index });
- expect(mockState.testReports).toEqual(expectedState.testReports);
- expect(mockState.hasFullReport).toBe(true);
+ expect(mockState.testReports.test_suites[index]).toEqual(
+ expectedState.testReports.test_suites[index],
+ );
});
});
@@ -40,10 +44,21 @@ describe('Mutations TestReports Store', () => {
describe('set summary', () => {
it('should set summary', () => {
- const summary = { total_count: 1 };
+ const summary = {
+ total: { time: 0, count: 10, success: 1, failed: 2, skipped: 3, error: 4 },
+ };
+ const expectedSummary = {
+ ...summary,
+ total_time: 0,
+ total_count: 10,
+ success_count: 1,
+ failed_count: 2,
+ skipped_count: 3,
+ error_count: 4,
+ };
mutations[types.SET_SUMMARY](mockState, summary);
- expect(mockState.testReports).toEqual(summary);
+ expect(mockState.testReports).toEqual(expectedSummary);
});
});
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
index ef0bcffabe3..a709edf5184 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -22,7 +22,7 @@ describe('Test reports app', () => {
const testSummaryTable = () => wrapper.find(TestSummaryTable);
const actionSpies = {
- fetchFullReport: jest.fn(),
+ fetchTestSuite: jest.fn(),
fetchSummary: jest.fn(),
setSelectedSuiteIndex: jest.fn(),
removeSelectedSuiteIndex: jest.fn(),
@@ -91,28 +91,14 @@ describe('Test reports app', () => {
});
describe('when a suite is clicked', () => {
- describe('when the full test report has already been received', () => {
- beforeEach(() => {
- createComponent({ hasFullReport: true });
- testSummaryTable().vm.$emit('row-click', 0);
- });
-
- it('should only call setSelectedSuiteIndex', () => {
- expect(actionSpies.setSelectedSuiteIndex).toHaveBeenCalled();
- expect(actionSpies.fetchFullReport).not.toHaveBeenCalled();
- });
+ beforeEach(() => {
+ createComponent({ hasFullReport: true });
+ testSummaryTable().vm.$emit('row-click', 0);
});
- describe('when the full test report has not been received', () => {
- beforeEach(() => {
- createComponent({ hasFullReport: false });
- testSummaryTable().vm.$emit('row-click', 0);
- });
-
- it('should call setSelectedSuiteIndex and fetchFullReport', () => {
- expect(actionSpies.setSelectedSuiteIndex).toHaveBeenCalled();
- expect(actionSpies.fetchFullReport).toHaveBeenCalled();
- });
+ it('should call setSelectedSuiteIndex and fetchTestSuite', () => {
+ expect(actionSpies.setSelectedSuiteIndex).toHaveBeenCalled();
+ expect(actionSpies.fetchTestSuite).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
index 65bffe7039a..3a4aa94571e 100644
--- a/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_suite_table_spec.js
@@ -23,6 +23,8 @@ describe('Test reports suite table', () => {
const noCasesMessage = () => wrapper.find('.js-no-test-cases');
const allCaseRows = () => wrapper.findAll('.js-case-row');
const findCaseRowAtIndex = index => wrapper.findAll('.js-case-row').at(index);
+ const allCaseNames = () =>
+ wrapper.findAll('[data-testid="caseName"]').wrappers.map(el => el.attributes('text'));
const findIconForRow = (row, status) => row.find(`.ci-status-icon-${status}`);
const createComponent = (suite = testSuite) => {
@@ -61,18 +63,14 @@ describe('Test reports suite table', () => {
expect(allCaseRows().length).toBe(testCases.length);
});
- it('renders the failed tests first', () => {
- const failedCaseNames = testCases
- .filter(x => x.status === TestStatus.FAILED)
- .map(x => x.name);
+ it('renders the failed tests first, skipped tests next, then successful tests', () => {
+ const expectedCaseOrder = [
+ ...testCases.filter(x => x.status === TestStatus.FAILED),
+ ...testCases.filter(x => x.status === TestStatus.SKIPPED),
+ ...testCases.filter(x => x.status === TestStatus.SUCCESS),
+ ].map(x => x.name);
- const skippedCaseNames = testCases
- .filter(x => x.status === TestStatus.SKIPPED)
- .map(x => x.name);
-
- expect(findCaseRowAtIndex(0).text()).toContain(failedCaseNames[0]);
- expect(findCaseRowAtIndex(1).text()).toContain(failedCaseNames[1]);
- expect(findCaseRowAtIndex(2).text()).toContain(skippedCaseNames[0]);
+ expect(allCaseNames()).toEqual(expectedCaseOrder);
});
it('renders the correct icon for each status', () => {
diff --git a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
index 650dd8a1def..2e32d62b4bd 100644
--- a/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_branch_name_token_spec.js
@@ -1,6 +1,6 @@
-import Api from '~/api';
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Api from '~/api';
import PipelineBranchNameToken from '~/pipelines/components/pipelines_list/tokens/pipeline_branch_name_token.vue';
import { branches, mockBranchesAfterMap } from '../mock_data';
diff --git a/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
index 15b283dc2ff..42c9dfc9ff0 100644
--- a/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_tag_name_token_spec.js
@@ -1,6 +1,6 @@
-import Api from '~/api';
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Api from '~/api';
import PipelineTagNameToken from '~/pipelines/components/pipelines_list/tokens/pipeline_tag_name_token.vue';
import { tags, mockTagsAfterMap } from '../mock_data';
diff --git a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js b/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
index 0b5cf2e202b..c95d2ea1b7b 100644
--- a/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
+++ b/spec/frontend/pipelines/tokens/pipeline_trigger_author_token_spec.js
@@ -1,6 +1,6 @@
-import Api from '~/api';
import { GlFilteredSearchToken, GlFilteredSearchSuggestion, GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import Api from '~/api';
import PipelineTriggerAuthorToken from '~/pipelines/components/pipelines_list/tokens/pipeline_trigger_author_token.vue';
import { users } from '../mock_data';
diff --git a/spec/frontend/project_find_file_spec.js b/spec/frontend/project_find_file_spec.js
index b4c6d202e14..757a02a04a3 100644
--- a/spec/frontend/project_find_file_spec.js
+++ b/spec/frontend/project_find_file_spec.js
@@ -1,11 +1,13 @@
import MockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import { TEST_HOST } from 'helpers/test_constants';
-import sanitize from 'sanitize-html';
+import { sanitize } from 'dompurify';
import ProjectFindFile from '~/project_find_file';
import axios from '~/lib/utils/axios_utils';
-jest.mock('sanitize-html', () => jest.fn(val => val));
+jest.mock('dompurify', () => ({
+ sanitize: jest.fn(val => val),
+}));
const BLOB_URL_TEMPLATE = `${TEST_HOST}/namespace/project/blob/master`;
const FILE_FIND_URL = `${TEST_HOST}/namespace/project/files/master?format=json`;
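
A note on the mock shape in project_find_file_spec.js: `sanitize` is a named export of `dompurify`, so the Jest factory has to return an object with that key; the old default-export style (`jest.fn(val => val)`) would leave `sanitize` undefined. A minimal, self-contained sketch of the same pattern (the assertion is illustrative, not taken from the spec):

import { sanitize } from 'dompurify';

// Mock the named export with a pass-through so the test can assert the call
// without exercising real DOMPurify sanitization.
jest.mock('dompurify', () => ({
  sanitize: jest.fn(val => val),
}));

it('passes markup through the sanitizer unchanged', () => {
  const html = '<a href="/namespace/project/blob/master/foo.js">foo.js</a>';

  expect(sanitize(html)).toBe(html);
  expect(sanitize).toHaveBeenCalledWith(html);
});
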
diff --git a/spec/frontend/projects/commits/components/author_select_spec.js b/spec/frontend/projects/commits/components/author_select_spec.js
index dab91d8b37c..d6fac6f5f79 100644
--- a/spec/frontend/projects/commits/components/author_select_spec.js
+++ b/spec/frontend/projects/commits/components/author_select_spec.js
@@ -1,14 +1,14 @@
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import * as urlUtility from '~/lib/utils/url_utility';
-import AuthorSelect from '~/projects/commits/components/author_select.vue';
-import { createStore } from '~/projects/commits/store';
import {
GlNewDropdown,
GlNewDropdownHeader,
GlSearchBoxByType,
GlNewDropdownItem,
} from '@gitlab/ui';
+import * as urlUtility from '~/lib/utils/url_utility';
+import AuthorSelect from '~/projects/commits/components/author_select.vue';
+import { createStore } from '~/projects/commits/store';
const localVue = createLocalVue();
localVue.use(Vuex);
diff --git a/spec/frontend/projects/commits/store/actions_spec.js b/spec/frontend/projects/commits/store/actions_spec.js
index 886224252ad..a842aaa2a76 100644
--- a/spec/frontend/projects/commits/store/actions_spec.js
+++ b/spec/frontend/projects/commits/store/actions_spec.js
@@ -1,10 +1,10 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
-import * as types from '~/projects/commits/store/mutation_types';
import testAction from 'helpers/vuex_action_helper';
+import * as types from '~/projects/commits/store/mutation_types';
import actions from '~/projects/commits/store/actions';
import createState from '~/projects/commits/store/state';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
diff --git a/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
new file mode 100644
index 00000000000..44220bdef64
--- /dev/null
+++ b/spec/frontend/projects/components/__snapshots__/project_delete_button_spec.js.snap
@@ -0,0 +1,83 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Project remove modal initialized matches the snapshot 1`] = `
+<form
+ action="some/path"
+ method="post"
+>
+ <input
+ name="_method"
+ type="hidden"
+ value="delete"
+ />
+
+ <input
+ name="authenticity_token"
+ type="hidden"
+ />
+
+ <gl-button-stub
+ category="primary"
+ icon=""
+ role="button"
+ size="medium"
+ tabindex="0"
+ variant="danger"
+ >
+ Delete project
+ </gl-button-stub>
+
+ <gl-modal-stub
+ actioncancel="[object Object]"
+ actionprimary="[object Object]"
+ footer-class="gl-bg-gray-10 gl-p-5"
+ modalclass=""
+ modalid="fakeUniqueId"
+ ok-variant="danger"
+ size="sm"
+ title-class="gl-text-red-500"
+ titletag="h4"
+ >
+
+ <div>
+ <gl-alert-stub
+ class="gl-mb-5"
+ dismisslabel="Dismiss"
+ primarybuttonlink=""
+ primarybuttontext=""
+ secondarybuttonlink=""
+ secondarybuttontext=""
+ title="You are about to permanently delete this project"
+ variant="danger"
+ >
+ <gl-sprintf-stub
+          message="Once a project is permanently deleted it %{strongStart}cannot be recovered%{strongEnd}. Permanently deleting this project will %{strongStart}immediately delete%{strongEnd} its repositories and %{strongStart}all related resources%{strongEnd} including issues, merge requests, etc."
+ />
+ </gl-alert-stub>
+
+ <p>
+      This action cannot be undone. You will lose the project's repository and all content: issues, merge requests, etc.
+ </p>
+
+ <p
+ class="gl-mb-1"
+ >
+ Please type the following to confirm:
+ </p>
+
+ <p>
+ <code>
+ foo
+ </code>
+ </p>
+
+ <gl-form-input-stub
+ id="confirm_name_input"
+ name="confirm_name_input"
+ type="text"
+ />
+
+ </div>
+ </gl-modal-stub>
+</form>
+`;
diff --git a/spec/frontend/projects/components/__snapshots__/remove_modal_spec.js.snap b/spec/frontend/projects/components/__snapshots__/remove_modal_spec.js.snap
deleted file mode 100644
index 4d5b6c56a34..00000000000
--- a/spec/frontend/projects/components/__snapshots__/remove_modal_spec.js.snap
+++ /dev/null
@@ -1,126 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`Project remove modal initialized matches the snapshot 1`] = `
-<form
- action="some/path"
- method="post"
->
- <input
- name="_method"
- type="hidden"
- value="delete"
- />
-
- <input
- name="authenticity_token"
- type="hidden"
- />
-
- <b-button-stub
- class="[object Object]"
- event="click"
- role="button"
- routertag="a"
- size="md"
- tabindex="0"
- tag="button"
- type="button"
- variant="danger"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
- Remove project
- </span>
- </b-button-stub>
-
- <b-modal-stub
- canceltitle="Cancel"
- cancelvariant="secondary"
- footerclass="bg-gray-light gl-p-5"
- headerclosecontent="&times;"
- headercloselabel="Close"
- id="remove-project-modal"
- ignoreenforcefocusselector=""
- lazy="true"
- modalclass="gl-modal,"
- oktitle="OK"
- okvariant="danger"
- size="sm"
- title=""
- titletag="h4"
- >
-
- <div>
- <p
- class="gl-text-red-500 gl-font-weight-bold"
- >
- This can lead to data loss.
- </p>
-
- <p
- class="gl-mb-0"
- >
- This action can lead to data loss. To prevent accidental actions we ask you to confirm your intention.
- </p>
-
- <p>
- <gl-sprintf-stub
- message="Please type %{phrase_code} to proceed or close this modal to cancel."
- />
- </p>
-
- <gl-form-input-stub
- id="confirm_name_input"
- name="confirm_name_input"
- type="text"
- />
- </div>
-
- <template />
-
- <template>
- Confirmation required
- </template>
-
- <template />
-
- <template />
-
- <template />
-
- <template>
- <div
- class="gl-w-full gl-display-flex gl-just-content-start gl-m-0"
- >
- <b-button-stub
- class="[object Object]"
- disabled="true"
- event="click"
- routertag="a"
- size="md"
- tag="button"
- type="button"
- variant="danger"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
-
- Confirm
-
- </span>
- </b-button-stub>
- </div>
- </template>
- </b-modal-stub>
-</form>
-`;
diff --git a/spec/frontend/projects/components/project_delete_button_spec.js b/spec/frontend/projects/components/project_delete_button_spec.js
new file mode 100644
index 00000000000..444e465ebaa
--- /dev/null
+++ b/spec/frontend/projects/components/project_delete_button_spec.js
@@ -0,0 +1,47 @@
+import { shallowMount } from '@vue/test-utils';
+import ProjectDeleteButton from '~/projects/components/project_delete_button.vue';
+import SharedDeleteButton from '~/projects/components/shared/delete_button.vue';
+
+jest.mock('lodash/uniqueId', () => () => 'fakeUniqueId');
+
+describe('Project remove modal', () => {
+ let wrapper;
+
+ const findSharedDeleteButton = () => wrapper.find(SharedDeleteButton);
+
+ const defaultProps = {
+ confirmPhrase: 'foo',
+ formPath: 'some/path',
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(ProjectDeleteButton, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ SharedDeleteButton,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('initialized', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('passes confirmPhrase and formPath props to the shared delete button', () => {
+ expect(findSharedDeleteButton().props()).toEqual(defaultProps);
+ });
+ });
+});
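
The new project_delete_button_spec.js stubs the shared child and then asserts `props()` equality, which is a compact way to verify prop forwarding without rendering the child's internals. A hypothetical reduced version of that pattern (ParentButton/ChildButton are illustrative names, not GitLab components):

import { shallowMount } from '@vue/test-utils';

// Illustrative components only; the real spec uses ProjectDeleteButton
// and SharedDeleteButton from ~/projects/components.
const ChildButton = {
  name: 'ChildButton',
  props: ['confirmPhrase', 'formPath'],
  template: '<button type="button" />',
};

const ParentButton = {
  components: { ChildButton },
  props: ['confirmPhrase', 'formPath'],
  template: '<child-button :confirm-phrase="confirmPhrase" :form-path="formPath" />',
};

it('forwards its props to the child component', () => {
  const propsData = { confirmPhrase: 'foo', formPath: 'some/path' };
  const wrapper = shallowMount(ParentButton, { propsData, stubs: { ChildButton } });

  expect(wrapper.find(ChildButton).props()).toEqual(propsData);
});
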
diff --git a/spec/frontend/projects/components/remove_modal_spec.js b/spec/frontend/projects/components/remove_modal_spec.js
deleted file mode 100644
index 339aee65b99..00000000000
--- a/spec/frontend/projects/components/remove_modal_spec.js
+++ /dev/null
@@ -1,62 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlButton, GlModal } from '@gitlab/ui';
-import ProjectRemoveModal from '~/projects/components/remove_modal.vue';
-
-describe('Project remove modal', () => {
- let wrapper;
-
- const findFormElement = () => wrapper.find('form').element;
- const findConfirmButton = () => wrapper.find(GlModal).find(GlButton);
-
- const defaultProps = {
- formPath: 'some/path',
- confirmPhrase: 'foo',
- warningMessage: 'This can lead to data loss.',
- };
-
- const createComponent = (data = {}) => {
- wrapper = shallowMount(ProjectRemoveModal, {
- propsData: defaultProps,
- data: () => data,
- stubs: {
- GlButton,
- GlModal,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- describe('initialized', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('matches the snapshot', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
- });
-
- describe('user input matches the confirmPhrase', () => {
- beforeEach(() => {
- createComponent({ userInput: defaultProps.confirmPhrase });
- });
-
- it('the confirm button is not dislabled', () => {
- expect(findConfirmButton().attributes('disabled')).toBe(undefined);
- });
-
- describe('and when the confirmation button is clicked', () => {
- beforeEach(() => {
- findConfirmButton().vm.$emit('click');
- });
-
- it('submits the form element', () => {
- expect(findFormElement().submit).toHaveBeenCalled();
- });
- });
- });
-});
diff --git a/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
new file mode 100644
index 00000000000..a43acc8c002
--- /dev/null
+++ b/spec/frontend/projects/components/shared/__snapshots__/delete_button_spec.js.snap
@@ -0,0 +1,113 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Project remove modal initialized matches the snapshot 1`] = `
+<form
+ action="some/path"
+ method="post"
+>
+ <input
+ name="_method"
+ type="hidden"
+ value="delete"
+ />
+
+ <input
+ name="authenticity_token"
+ type="hidden"
+ value="test-csrf-token"
+ />
+
+ <gl-button-stub
+ category="primary"
+ icon=""
+ role="button"
+ size="medium"
+ tabindex="0"
+ variant="danger"
+ >
+ Delete project
+ </gl-button-stub>
+
+ <b-modal-stub
+ canceltitle="Cancel"
+ cancelvariant="secondary"
+ footerclass="gl-bg-gray-10 gl-p-5"
+ headerclosecontent="&times;"
+ headercloselabel="Close"
+ id="delete-project-modal-2"
+ ignoreenforcefocusselector=""
+ lazy="true"
+ modalclass="gl-modal,"
+ oktitle="OK"
+ okvariant="danger"
+ size="sm"
+ title=""
+ titleclass="gl-text-red-500"
+ titletag="h4"
+ >
+
+ <div>
+
+ <p
+ class="gl-mb-1"
+ >
+ Please type the following to confirm:
+ </p>
+
+ <p>
+ <code>
+ foo
+ </code>
+ </p>
+
+ <gl-form-input-stub
+ id="confirm_name_input"
+ name="confirm_name_input"
+ type="text"
+ />
+
+ </div>
+
+ <template />
+
+ <template>
+ Delete project. Are you ABSOLUTELY SURE?
+ </template>
+
+ <template />
+
+ <template />
+
+ <template />
+
+ <template>
+ <gl-button-stub
+ category="primary"
+ class="js-modal-action-cancel"
+ icon=""
+ size="medium"
+ variant="default"
+ >
+
+ Cancel, keep project
+
+ </gl-button-stub>
+
+ <!---->
+
+ <gl-button-stub
+ category="primary"
+ class="js-modal-action-primary"
+ disabled="true"
+ icon=""
+ size="medium"
+ variant="danger"
+ >
+
+ Yes, delete project
+
+ </gl-button-stub>
+ </template>
+ </b-modal-stub>
+</form>
+`;
diff --git a/spec/frontend/projects/components/shared/delete_button_spec.js b/spec/frontend/projects/components/shared/delete_button_spec.js
new file mode 100644
index 00000000000..a6394a50011
--- /dev/null
+++ b/spec/frontend/projects/components/shared/delete_button_spec.js
@@ -0,0 +1,83 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import SharedDeleteButton from '~/projects/components/shared/delete_button.vue';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'test-csrf-token' }));
+
+describe('Project remove modal', () => {
+ let wrapper;
+
+ const findFormElement = () => wrapper.find('form');
+ const findConfirmButton = () => wrapper.find('.js-modal-action-primary');
+ const findAuthenticityTokenInput = () => findFormElement().find('input[name=authenticity_token]');
+ const findModal = () => wrapper.find(GlModal);
+
+ const defaultProps = {
+ confirmPhrase: 'foo',
+ formPath: 'some/path',
+ };
+
+ const createComponent = (data = {}) => {
+ wrapper = shallowMount(SharedDeleteButton, {
+ propsData: defaultProps,
+ data: () => data,
+ stubs: {
+ GlModal,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+  describe('initialized', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+
+ it('sets a csrf token on the authenticity form input', () => {
+ expect(findAuthenticityTokenInput().element.value).toEqual('test-csrf-token');
+ });
+
+ it('sets the form action to the provided path', () => {
+ expect(findFormElement().attributes('action')).toEqual(defaultProps.formPath);
+ });
+ });
+
+ describe('when the user input does not match the confirmPhrase', () => {
+ beforeEach(() => {
+ createComponent({ userInput: 'bar' });
+ });
+
+ it('the confirm button is disabled', () => {
+ expect(findConfirmButton().attributes('disabled')).toBe('true');
+ });
+ });
+
+ describe('when the user input matches the confirmPhrase', () => {
+ beforeEach(() => {
+ createComponent({ userInput: defaultProps.confirmPhrase });
+ });
+
+ it('the confirm button is not disabled', () => {
+ expect(findConfirmButton().attributes('disabled')).toBe(undefined);
+ });
+ });
+
+ describe('when the modal is confirmed', () => {
+ beforeEach(() => {
+ createComponent();
+ findModal().vm.$emit('ok');
+ });
+
+ it('submits the form element', () => {
+ expect(findFormElement().element.submit).toHaveBeenCalled();
+ });
+ });
+});
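
Two details in delete_button_spec.js are worth calling out: the CSRF token comes from a module mock rather than a page meta tag, and the `submit` assertion only works because jsdom does not implement real form submission, so the call has to land on a spy. A sketch of that spy setup, reusing the spec's own `createComponent`/`findModal` helpers and assuming no global submit mock is already in place:

it('submits the form when the modal emits ok', () => {
  // jsdom throws "Not implemented" for real form submission, so spy on the
  // prototype and swallow the call.
  const submitSpy = jest
    .spyOn(HTMLFormElement.prototype, 'submit')
    .mockImplementation(() => {});

  createComponent();
  findModal().vm.$emit('ok');

  expect(submitSpy).toHaveBeenCalled();
  submitSpy.mockRestore();
});
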
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
index cd8b39f0426..42a7aa6bc88 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/legacy_container_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import LegacyContainer from '~/projects/experiment_new_project_creation/components/legacy_container.vue';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import LegacyContainer from '~/projects/experiment_new_project_creation/components/legacy_container.vue';
describe('Legacy container component', () => {
let wrapper;
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
index acd142fa5ba..cf23ba281f9 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import WelcomePage from '~/projects/experiment_new_project_creation/components/welcome.vue';
import { mockTracking } from 'helpers/tracking_helper';
+import WelcomePage from '~/projects/experiment_new_project_creation/components/welcome.vue';
describe('Welcome page', () => {
let wrapper;
diff --git a/spec/frontend/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index 7aafbd33fc8..c32979dcd74 100644
--- a/spec/frontend/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
-import projectNew from '~/projects/project_new';
import { TEST_HOST } from 'jest/helpers/test_constants';
+import projectNew from '~/projects/project_new';
describe('New Project', () => {
let $projectImportUrl;
diff --git a/spec/frontend/projects/settings/access_dropdown_spec.js b/spec/frontend/projects/settings/access_dropdown_spec.js
new file mode 100644
index 00000000000..6d323b0408b
--- /dev/null
+++ b/spec/frontend/projects/settings/access_dropdown_spec.js
@@ -0,0 +1,140 @@
+import $ from 'jquery';
+import '~/gl_dropdown';
+import AccessDropdown from '~/projects/settings/access_dropdown';
+import { LEVEL_TYPES } from '~/projects/settings/constants';
+
+describe('AccessDropdown', () => {
+ const defaultLabel = 'dummy default label';
+ let dropdown;
+
+ beforeEach(() => {
+ setFixtures(`
+ <div id="dummy-dropdown">
+ <span class="dropdown-toggle-text"></span>
+ </div>
+ `);
+ const $dropdown = $('#dummy-dropdown');
+ $dropdown.data('defaultLabel', defaultLabel);
+ const options = {
+ $dropdown,
+ accessLevelsData: {
+ roles: [
+ {
+ id: 42,
+ text: 'Dummy Role',
+ },
+ ],
+ },
+ };
+ dropdown = new AccessDropdown(options);
+ });
+
+ describe('toggleLabel', () => {
+ let $dropdownToggleText;
+ const dummyItems = [
+ { type: LEVEL_TYPES.ROLE, access_level: 42 },
+ { type: LEVEL_TYPES.USER },
+ { type: LEVEL_TYPES.USER },
+ { type: LEVEL_TYPES.GROUP },
+ { type: LEVEL_TYPES.GROUP },
+ { type: LEVEL_TYPES.GROUP },
+ ];
+
+ beforeEach(() => {
+ $dropdownToggleText = $('.dropdown-toggle-text');
+ });
+
+ it('displays number of items', () => {
+ dropdown.setSelectedItems(dummyItems);
+ $dropdownToggleText.addClass('is-default');
+
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('1 role, 2 users, 3 groups');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+
+ describe('without selected items', () => {
+ beforeEach(() => {
+ dropdown.setSelectedItems([]);
+ });
+
+ it('falls back to default label', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe(defaultLabel);
+ expect($dropdownToggleText).toHaveClass('is-default');
+ });
+ });
+
+ describe('with only role', () => {
+ beforeEach(() => {
+ dropdown.setSelectedItems(dummyItems.filter(item => item.type === LEVEL_TYPES.ROLE));
+ $dropdownToggleText.addClass('is-default');
+ });
+
+ it('displays the role name', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('Dummy Role');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+ });
+
+ describe('with only users', () => {
+ beforeEach(() => {
+ dropdown.setSelectedItems(dummyItems.filter(item => item.type === LEVEL_TYPES.USER));
+ $dropdownToggleText.addClass('is-default');
+ });
+
+ it('displays number of users', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('2 users');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+ });
+
+ describe('with only groups', () => {
+ beforeEach(() => {
+ dropdown.setSelectedItems(dummyItems.filter(item => item.type === LEVEL_TYPES.GROUP));
+ $dropdownToggleText.addClass('is-default');
+ });
+
+ it('displays number of groups', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('3 groups');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+ });
+
+ describe('with users and groups', () => {
+ beforeEach(() => {
+ const selectedTypes = [LEVEL_TYPES.GROUP, LEVEL_TYPES.USER];
+ dropdown.setSelectedItems(dummyItems.filter(item => selectedTypes.includes(item.type)));
+ $dropdownToggleText.addClass('is-default');
+ });
+
+      it('displays number of users and groups', () => {
+ const label = dropdown.toggleLabel();
+
+ expect(label).toBe('2 users, 3 groups');
+ expect($dropdownToggleText).not.toHaveClass('is-default');
+ });
+ });
+ });
+
+ describe('userRowHtml', () => {
+ it('escapes users name', () => {
+ const user = {
+ avatar_url: '',
+ name: '<img src=x onerror=alert(document.domain)>',
+ username: 'test',
+ };
+ const template = dropdown.userRowHtml(user);
+
+ expect(template).not.toContain(user.name);
+ });
+ });
+});
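
The toggleLabel expectations above ('1 role, 2 users, 3 groups', '2 users', 'Dummy Role') imply a count-by-type step plus pluralization; the following is only a rough sketch of that shape, not the actual AccessDropdown implementation:

// Hypothetical helper that produces labels like the ones the spec expects.
const countByType = items =>
  items.reduce((acc, { type }) => ({ ...acc, [type]: (acc[type] || 0) + 1 }), {});

const toLabel = items =>
  Object.entries(countByType(items))
    .map(([type, count]) => `${count} ${type}${count === 1 ? '' : 's'}`)
    .join(', ');

// toLabel([{ type: 'role' }, { type: 'user' }, { type: 'user' }])
// => '1 role, 2 users'
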
diff --git a/spec/frontend/prometheus_alerts/components/reset_key_spec.js b/spec/frontend/prometheus_alerts/components/reset_key_spec.js
index df52baafa29..489586a60fe 100644
--- a/spec/frontend/prometheus_alerts/components/reset_key_spec.js
+++ b/spec/frontend/prometheus_alerts/components/reset_key_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
-import ResetKey from '~/prometheus_alerts/components/reset_key.vue';
import { GlModal } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
+import ResetKey from '~/prometheus_alerts/components/reset_key.vue';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import axios from '~/lib/utils/axios_utils';
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 2688e4b3428..1556f5b19dc 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -26,12 +26,14 @@ describe('Ref selector component', () => {
let tagsApiCallSpy;
let commitApiCallSpy;
- const createComponent = () => {
+ const createComponent = (props = {}, attrs = {}) => {
wrapper = mount(RefSelector, {
propsData: {
projectId,
value: '',
+ ...props,
},
+ attrs,
listeners: {
// simulate a parent component v-model binding
input: selectedRef => {
@@ -163,6 +165,52 @@ describe('Ref selector component', () => {
});
describe('post-initialization behavior', () => {
+ describe('when the parent component provides an `id` binding', () => {
+ const id = 'git-ref';
+
+ beforeEach(() => {
+ createComponent({}, { id });
+
+ return waitForRequests();
+ });
+
+ it('adds the provided ID to the GlNewDropdown instance', () => {
+ expect(wrapper.attributes().id).toBe(id);
+ });
+ });
+
+ describe('when a ref is pre-selected', () => {
+ const preselectedRef = fixtures.branches[0].name;
+
+ beforeEach(() => {
+ createComponent({ value: preselectedRef });
+
+ return waitForRequests();
+ });
+
+ it('renders the pre-selected ref name', () => {
+ expect(findButtonContent().text()).toBe(preselectedRef);
+ });
+ });
+
+ describe('when the selected ref is updated by the parent component', () => {
+ const updatedRef = fixtures.branches[0].name;
+
+ beforeEach(() => {
+ createComponent();
+
+ return waitForRequests();
+ });
+
+ it('renders the updated ref name', () => {
+ wrapper.setProps({ value: updatedRef });
+
+ return localVue.nextTick().then(() => {
+ expect(findButtonContent().text()).toBe(updatedRef);
+ });
+ });
+ });
+
describe('when the search query is updated', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/registry/explorer/components/details_page/details_row_spec.js b/spec/frontend/registry/explorer/components/details_page/details_row_spec.js
deleted file mode 100644
index 95b8e18d677..00000000000
--- a/spec/frontend/registry/explorer/components/details_page/details_row_spec.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlIcon } from '@gitlab/ui';
-import component from '~/registry/explorer/components/details_page/details_row.vue';
-
-describe('DetailsRow', () => {
- let wrapper;
-
- const findIcon = () => wrapper.find(GlIcon);
- const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
-
- const mountComponent = () => {
- wrapper = shallowMount(component, {
- propsData: {
- icon: 'clock',
- },
- slots: {
- default: '<div data-testid="default-slot"></div>',
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('contains an icon', () => {
- mountComponent();
- expect(findIcon().exists()).toBe(true);
- });
-
- it('icon has the correct props', () => {
- mountComponent();
- expect(findIcon().props()).toMatchObject({
- name: 'clock',
- });
- });
-
- it('has a default slot', () => {
- mountComponent();
- expect(findDefaultSlot().exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
index 9e876d6d8a3..a21facefc97 100644
--- a/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/registry/explorer/components/details_page/tags_list_row_spec.js
@@ -1,11 +1,12 @@
import { shallowMount } from '@vue/test-utils';
import { GlFormCheckbox, GlSprintf, GlIcon } from '@gitlab/ui';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import component from '~/registry/explorer/components/details_page/tags_list_row.vue';
import DeleteButton from '~/registry/explorer/components/delete_button.vue';
-import DetailsRow from '~/registry/explorer/components/details_page/details_row.vue';
+import DetailsRow from '~/registry/shared/components/details_row.vue';
import {
REMOVE_TAG_BUTTON_TITLE,
REMOVE_TAG_BUTTON_DISABLE_TOOLTIP,
@@ -13,7 +14,6 @@ import {
NOT_AVAILABLE_TEXT,
NOT_AVAILABLE_SIZE,
} from '~/registry/explorer/constants/index';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { tagsListResponse } from '../../mock_data';
import { ListItem } from '../../stubs';
diff --git a/spec/frontend/registry/explorer/components/list_page/cli_commands_spec.js b/spec/frontend/registry/explorer/components/list_page/cli_commands_spec.js
index a556be12089..b0291de5f3c 100644
--- a/spec/frontend/registry/explorer/components/list_page/cli_commands_spec.js
+++ b/spec/frontend/registry/explorer/components/list_page/cli_commands_spec.js
@@ -1,6 +1,6 @@
import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';
-import { GlDropdown, GlFormGroup, GlFormInputGroup } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlFormGroup, GlFormInputGroup } from '@gitlab/ui';
import Tracking from '~/tracking';
import * as getters from '~/registry/explorer/stores/getters';
import QuickstartDropdown from '~/registry/explorer/components/list_page/cli_commands.vue';
@@ -23,7 +23,7 @@ describe('cli_commands', () => {
let wrapper;
let store;
- const findDropdownButton = () => wrapper.find(GlDropdown);
+ const findDropdownButton = () => wrapper.find(GlDeprecatedDropdown);
const findFormGroups = () => wrapper.findAll(GlFormGroup);
const mountComponent = () => {
diff --git a/spec/frontend/registry/explorer/pages/details_spec.js b/spec/frontend/registry/explorer/pages/details_spec.js
index 9bc0bae5c23..66e8a4aea0d 100644
--- a/spec/frontend/registry/explorer/pages/details_spec.js
+++ b/spec/frontend/registry/explorer/pages/details_spec.js
@@ -13,7 +13,7 @@ import {
SET_TAGS_LIST_SUCCESS,
SET_TAGS_PAGINATION,
SET_INITIAL_STATE,
-} from '~/registry/explorer/stores/mutation_types/';
+} from '~/registry/explorer/stores/mutation_types';
import { tagsListResponse } from '../mock_data';
import { DeleteModal } from '../stubs';
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index 2ece7593b41..b4e46fda2c4 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlSkeletonLoader, GlSprintf, GlAlert, GlSearchBoxByClick } from '@gitlab/ui';
-import Tracking from '~/tracking';
import waitForPromises from 'helpers/wait_for_promises';
+import Tracking from '~/tracking';
import component from '~/registry/explorer/pages/list.vue';
import CliCommands from '~/registry/explorer/components/list_page/cli_commands.vue';
import GroupEmptyState from '~/registry/explorer/components/list_page/group_empty_state.vue';
@@ -14,7 +14,7 @@ import {
SET_IMAGES_LIST_SUCCESS,
SET_PAGINATION,
SET_INITIAL_STATE,
-} from '~/registry/explorer/stores/mutation_types/';
+} from '~/registry/explorer/stores/mutation_types';
import {
DELETE_IMAGE_SUCCESS_MESSAGE,
DELETE_IMAGE_ERROR_MESSAGE,
diff --git a/spec/frontend/registry/explorer/stores/actions_spec.js b/spec/frontend/registry/explorer/stores/actions_spec.js
index 15f9db90910..fb93ab06ca8 100644
--- a/spec/frontend/registry/explorer/stores/actions_spec.js
+++ b/spec/frontend/registry/explorer/stores/actions_spec.js
@@ -1,10 +1,10 @@
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
-import * as actions from '~/registry/explorer/stores/actions';
-import * as types from '~/registry/explorer/stores/mutation_types';
import testAction from 'helpers/vuex_action_helper';
-import createFlash from '~/flash';
import { TEST_HOST } from 'helpers/test_constants';
+import axios from '~/lib/utils/axios_utils';
+import * as actions from '~/registry/explorer/stores/actions';
+import * as types from '~/registry/explorer/stores/mutation_types';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import { reposServerResponse, registryServerResponse } from '../mock_data';
jest.mock('~/flash.js');
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
index 9b9ca92270c..6f9518808db 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
import Tracking from '~/tracking';
import component from '~/registry/settings/components/settings_form.vue';
import expirationPolicyFields from '~/registry/shared/components/expiration_policy_fields.vue';
@@ -7,7 +8,6 @@ import {
UPDATE_SETTINGS_ERROR_MESSAGE,
UPDATE_SETTINGS_SUCCESS_MESSAGE,
} from '~/registry/shared/constants';
-import waitForPromises from 'helpers/wait_for_promises';
import { stringifiedFormOptions } from '../../shared/mock_data';
describe('Settings Form', () => {
diff --git a/spec/frontend/registry/settings/store/actions_spec.js b/spec/frontend/registry/settings/store/actions_spec.js
index f92d10d087f..51b89f96ef2 100644
--- a/spec/frontend/registry/settings/store/actions_spec.js
+++ b/spec/frontend/registry/settings/store/actions_spec.js
@@ -1,5 +1,5 @@
-import Api from '~/api';
import testAction from 'helpers/vuex_action_helper';
+import Api from '~/api';
import * as actions from '~/registry/settings/store/actions';
import * as types from '~/registry/settings/store/mutation_types';
diff --git a/spec/frontend/registry/shared/components/details_row_spec.js b/spec/frontend/registry/shared/components/details_row_spec.js
new file mode 100644
index 00000000000..5ae4e0ab37f
--- /dev/null
+++ b/spec/frontend/registry/shared/components/details_row_spec.js
@@ -0,0 +1,71 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import component from '~/registry/shared/components/details_row.vue';
+
+describe('DetailsRow', () => {
+ let wrapper;
+
+ const findIcon = () => wrapper.find(GlIcon);
+ const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
+
+ const mountComponent = props => {
+ wrapper = shallowMount(component, {
+ propsData: {
+ icon: 'clock',
+ ...props,
+ },
+ slots: {
+ default: '<div data-testid="default-slot"></div>',
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('has a default slot', () => {
+ mountComponent();
+ expect(findDefaultSlot().exists()).toBe(true);
+ });
+
+ describe('icon prop', () => {
+ it('contains an icon', () => {
+ mountComponent();
+ expect(findIcon().exists()).toBe(true);
+ });
+
+ it('icon has the correct props', () => {
+ mountComponent();
+ expect(findIcon().props()).toMatchObject({
+ name: 'clock',
+ });
+ });
+ });
+
+ describe('padding prop', () => {
+ it('padding has a default', () => {
+ mountComponent();
+ expect(wrapper.classes('gl-py-2')).toBe(true);
+ });
+
+ it('is reflected in the template', () => {
+ mountComponent({ padding: 'gl-py-4' });
+ expect(wrapper.classes('gl-py-4')).toBe(true);
+ });
+ });
+
+ describe('dashed prop', () => {
+ const borderClasses = ['gl-border-b-solid', 'gl-border-gray-100', 'gl-border-b-1'];
+ it('by default component has no border', () => {
+ mountComponent();
+      expect(wrapper.classes()).not.toEqual(expect.arrayContaining(borderClasses));
+ });
+
+ it('has a border when dashed is true', () => {
+ mountComponent({ dashed: true });
+ expect(wrapper.classes()).toEqual(expect.arrayContaining(borderClasses));
+ });
+ });
+});
diff --git a/spec/frontend/related_merge_requests/store/actions_spec.js b/spec/frontend/related_merge_requests/store/actions_spec.js
index 26c5977cb5f..fa031a91c83 100644
--- a/spec/frontend/related_merge_requests/store/actions_spec.js
+++ b/spec/frontend/related_merge_requests/store/actions_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
-import createFlash from '~/flash';
import testAction from 'helpers/vuex_action_helper';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as types from '~/related_merge_requests/store/mutation_types';
import * as actions from '~/related_merge_requests/store/actions';
diff --git a/spec/frontend/releases/components/app_edit_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 4450b047acd..e9727801c1a 100644
--- a/spec/frontend/releases/components/app_edit_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -1,15 +1,15 @@
import Vuex from 'vuex';
import { mount } from '@vue/test-utils';
-import ReleaseEditApp from '~/releases/components/app_edit.vue';
+import { merge } from 'lodash';
+import axios from 'axios';
+import MockAdapter from 'axios-mock-adapter';
+import ReleaseEditNewApp from '~/releases/components/app_edit_new.vue';
import { release as originalRelease, milestones as originalMilestones } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { BACK_URL_PARAM } from '~/releases/constants';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
-import { merge } from 'lodash';
-import axios from 'axios';
-import MockAdapter from 'axios-mock-adapter';
-describe('Release edit component', () => {
+describe('Release edit/new component', () => {
let wrapper;
let release;
let actions;
@@ -27,13 +27,14 @@ describe('Release edit component', () => {
};
actions = {
- fetchRelease: jest.fn(),
- updateRelease: jest.fn(),
+ initializeRelease: jest.fn(),
+ saveRelease: jest.fn(),
addEmptyAssetLink: jest.fn(),
};
getters = {
isValid: () => true,
+ isExistingRelease: () => true,
validationErrors: () => ({
assets: {
links: [],
@@ -57,12 +58,14 @@ describe('Release edit component', () => {
),
);
- wrapper = mount(ReleaseEditApp, {
+ wrapper = mount(ReleaseEditNewApp, {
store,
provide: {
glFeatures: featureFlags,
},
});
+
+ wrapper.element.querySelectorAll('input').forEach(input => jest.spyOn(input, 'focus'));
};
beforeEach(() => {
@@ -80,14 +83,23 @@ describe('Release edit component', () => {
});
const findSubmitButton = () => wrapper.find('button[type=submit]');
+ const findForm = () => wrapper.find('form');
describe(`basic functionality tests: all tests unrelated to the "${BACK_URL_PARAM}" parameter`, () => {
- beforeEach(() => {
- factory();
+ beforeEach(factory);
+
+ it('calls initializeRelease when the component is created', () => {
+ expect(actions.initializeRelease).toHaveBeenCalledTimes(1);
});
- it('calls fetchRelease when the component is created', () => {
- expect(actions.fetchRelease).toHaveBeenCalledTimes(1);
+ it('focuses the first non-disabled input element once the page is shown', () => {
+ const firstEnabledInput = wrapper.element.querySelector('input:enabled');
+ const allInputs = wrapper.element.querySelectorAll('input');
+
+ allInputs.forEach(input => {
+ const expectedFocusCalls = input === firstEnabledInput ? 1 : 0;
+ expect(input.focus).toHaveBeenCalledTimes(expectedFocusCalls);
+ });
});
it('renders the description text at the top of the page', () => {
@@ -96,28 +108,6 @@ describe('Release edit component', () => {
);
});
- it('renders the correct tag name in the "Tag name" field', () => {
- expect(wrapper.find('#git-ref').element.value).toBe(release.tagName);
- });
-
- it('renders the correct help text under the "Tag name" field', () => {
- const helperText = wrapper.find('#tag-name-help');
- const helperTextLink = helperText.find('a');
- const helperTextLinkAttrs = helperTextLink.attributes();
-
- expect(helperText.text()).toBe(
- 'Changing a Release tag is only supported via Releases API. More information',
- );
- expect(helperTextLink.text()).toBe('More information');
- expect(helperTextLinkAttrs).toEqual(
- expect.objectContaining({
- href: state.updateReleaseApiDocsPath,
- rel: 'noopener noreferrer',
- target: '_blank',
- }),
- );
- });
-
it('renders the correct release title in the "Release title" field', () => {
expect(wrapper.find('#release-title').element.value).toBe(release.name);
});
@@ -130,16 +120,15 @@ describe('Release edit component', () => {
expect(findSubmitButton().attributes('type')).toBe('submit');
});
- it('calls updateRelease when the form is submitted', () => {
- wrapper.find('form').trigger('submit');
- expect(actions.updateRelease).toHaveBeenCalledTimes(1);
+ it('calls saveRelease when the form is submitted', () => {
+ findForm().trigger('submit');
+
+ expect(actions.saveRelease).toHaveBeenCalledTimes(1);
});
});
describe(`when the URL does not contain a "${BACK_URL_PARAM}" parameter`, () => {
- beforeEach(() => {
- factory();
- });
+ beforeEach(factory);
it(`renders a "Cancel" button with an href pointing to "${BACK_URL_PARAM}"`, () => {
const cancelButton = wrapper.find('.js-cancel-button');
@@ -164,6 +153,34 @@ describe('Release edit component', () => {
});
});
+ describe('when creating a new release', () => {
+ beforeEach(() => {
+ factory({
+ store: {
+ modules: {
+ detail: {
+ getters: {
+ isExistingRelease: () => false,
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('renders the submit button with the text "Create release"', () => {
+ expect(findSubmitButton().text()).toBe('Create release');
+ });
+ });
+
+ describe('when editing an existing release', () => {
+ beforeEach(factory);
+
+ it('renders the submit button with the text "Save changes"', () => {
+ expect(findSubmitButton().text()).toBe('Save changes');
+ });
+ });
+
describe('asset links form', () => {
const findAssetLinksForm = () => wrapper.find(AssetLinksForm);
@@ -227,6 +244,12 @@ describe('Release edit component', () => {
it('renders the submit button as disabled', () => {
expect(findSubmitButton().attributes('disabled')).toBe('disabled');
});
+
+ it('does not allow the form to be submitted', () => {
+ findForm().trigger('submit');
+
+ expect(actions.saveRelease).not.toHaveBeenCalled();
+ });
});
});
});
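
The focus test in app_edit_new_spec.js works by spying on every input's `focus` before the component is shown and then asserting call counts per element. The same idea against plain DOM nodes, as a standalone sketch:

it('focuses only the first enabled input', () => {
  document.body.innerHTML = `
    <input id="disabled-input" disabled />
    <input id="first-enabled" />
    <input id="second-enabled" />
  `;

  const inputs = Array.from(document.querySelectorAll('input'));
  inputs.forEach(input => jest.spyOn(input, 'focus'));

  // Stand-in for the component's "page shown" behavior.
  const firstEnabledInput = document.querySelector('input:enabled');
  firstEnabledInput.focus();

  inputs.forEach(input => {
    expect(input.focus).toHaveBeenCalledTimes(input === firstEnabledInput ? 1 : 0);
  });
});
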
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 91beb5b1418..8eafe07cb2f 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -1,6 +1,7 @@
import { range as rge } from 'lodash';
import Vue from 'vue';
import { mountComponentWithStore } from 'helpers/vue_mount_component_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import app from '~/releases/components/app_index.vue';
import createStore from '~/releases/stores';
import listModule from '~/releases/stores/modules/list';
@@ -13,7 +14,6 @@ import {
releases,
} from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import waitForPromises from 'helpers/wait_for_promises';
describe('Releases App ', () => {
const Component = Vue.extend(app);
diff --git a/spec/frontend/releases/components/app_new_spec.js b/spec/frontend/releases/components/app_new_spec.js
deleted file mode 100644
index 0d5664766e5..00000000000
--- a/spec/frontend/releases/components/app_new_spec.js
+++ /dev/null
@@ -1,26 +0,0 @@
-import Vue from 'vue';
-import Vuex from 'vuex';
-import { mount } from '@vue/test-utils';
-import ReleaseNewApp from '~/releases/components/app_new.vue';
-
-Vue.use(Vuex);
-
-describe('Release new component', () => {
- let wrapper;
-
- const factory = () => {
- const store = new Vuex.Store();
- wrapper = mount(ReleaseNewApp, { store });
- };
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('renders the app', () => {
- factory();
-
- expect(wrapper.exists()).toBe(true);
- });
-});
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 3dc9964c25c..e757fe98661 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -1,8 +1,8 @@
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
+import { GlSkeletonLoading } from '@gitlab/ui';
import ReleaseShowApp from '~/releases/components/app_show.vue';
import { release as originalRelease } from '../mock_data';
-import { GlSkeletonLoading } from '@gitlab/ui';
import ReleaseBlock from '~/releases/components/release_block.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index e1f8592270e..727d593d851 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -3,6 +3,7 @@ import { mount, createLocalVue } from '@vue/test-utils';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
import { release as originalRelease } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
+import { ENTER_KEY } from '~/lib/utils/keys';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
const localVue = createLocalVue();
@@ -91,42 +92,128 @@ describe('Release edit component', () => {
expect(actions.removeAssetLink).toHaveBeenCalledTimes(1);
});
- it('calls the "updateAssetLinkUrl" store method when text is entered into the "URL" input field', () => {
- const linkIdToUpdate = release.assets.links[0].id;
- const newUrl = 'updated url';
+ describe('URL input field', () => {
+ let input;
+ let linkIdToUpdate;
+ let newUrl;
- expect(actions.updateAssetLinkUrl).not.toHaveBeenCalled();
+ beforeEach(() => {
+ input = wrapper.find({ ref: 'urlInput' }).element;
+ linkIdToUpdate = release.assets.links[0].id;
+ newUrl = 'updated url';
+ });
- wrapper.find({ ref: 'urlInput' }).vm.$emit('change', newUrl);
+ const expectStoreMethodNotToBeCalled = () => {
+ expect(actions.updateAssetLinkUrl).not.toHaveBeenCalled();
+ };
- expect(actions.updateAssetLinkUrl).toHaveBeenCalledTimes(1);
- expect(actions.updateAssetLinkUrl).toHaveBeenCalledWith(
- expect.anything(),
- {
- linkIdToUpdate,
- newUrl,
- },
- undefined,
- );
+      const dispatchKeydownEvent = eventParams => {
+ const event = new KeyboardEvent('keydown', eventParams);
+
+ input.dispatchEvent(event);
+ };
+
+ const expectStoreMethodToBeCalled = () => {
+ expect(actions.updateAssetLinkUrl).toHaveBeenCalledTimes(1);
+ expect(actions.updateAssetLinkUrl).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ linkIdToUpdate,
+ newUrl,
+ },
+ undefined,
+ );
+ };
+
+ it('calls the "updateAssetLinkUrl" store method when text is entered into the "URL" input field', () => {
+ expectStoreMethodNotToBeCalled();
+
+ wrapper.find({ ref: 'urlInput' }).vm.$emit('change', newUrl);
+
+ expectStoreMethodToBeCalled();
+ });
+
+ it('calls the "updateAssetLinkUrl" store method when Ctrl+Enter is pressed inside the "URL" input field', () => {
+ expectStoreMethodNotToBeCalled();
+
+ input.value = newUrl;
+
+        dispatchKeydownEvent({ key: ENTER_KEY, ctrlKey: true });
+
+ expectStoreMethodToBeCalled();
+ });
+
+ it('calls the "updateAssetLinkUrl" store method when Cmd+Enter is pressed inside the "URL" input field', () => {
+ expectStoreMethodNotToBeCalled();
+
+ input.value = newUrl;
+
+        dispatchKeydownEvent({ key: ENTER_KEY, metaKey: true });
+
+ expectStoreMethodToBeCalled();
+ });
});
- it('calls the "updateAssetLinkName" store method when text is entered into the "Link title" input field', () => {
- const linkIdToUpdate = release.assets.links[0].id;
- const newName = 'updated name';
+ describe('Link title field', () => {
+ let input;
+ let linkIdToUpdate;
+ let newName;
- expect(actions.updateAssetLinkName).not.toHaveBeenCalled();
+ beforeEach(() => {
+ input = wrapper.find({ ref: 'nameInput' }).element;
+ linkIdToUpdate = release.assets.links[0].id;
+ newName = 'updated name';
+ });
- wrapper.find({ ref: 'nameInput' }).vm.$emit('change', newName);
+ const expectStoreMethodNotToBeCalled = () => {
+        expect(actions.updateAssetLinkName).not.toHaveBeenCalled();
+ };
- expect(actions.updateAssetLinkName).toHaveBeenCalledTimes(1);
- expect(actions.updateAssetLinkName).toHaveBeenCalledWith(
- expect.anything(),
- {
- linkIdToUpdate,
- newName,
- },
- undefined,
- );
+      const dispatchKeydownEvent = eventParams => {
+ const event = new KeyboardEvent('keydown', eventParams);
+
+ input.dispatchEvent(event);
+ };
+
+ const expectStoreMethodToBeCalled = () => {
+ expect(actions.updateAssetLinkName).toHaveBeenCalledTimes(1);
+ expect(actions.updateAssetLinkName).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ linkIdToUpdate,
+ newName,
+ },
+ undefined,
+ );
+ };
+
+ it('calls the "updateAssetLinkName" store method when text is entered into the "Link title" input field', () => {
+ expectStoreMethodNotToBeCalled();
+
+ wrapper.find({ ref: 'nameInput' }).vm.$emit('change', newName);
+
+ expectStoreMethodToBeCalled();
+ });
+
+ it('calls the "updateAssetLinkName" store method when Ctrl+Enter is pressed inside the "Link title" input field', () => {
+ expectStoreMethodNotToBeCalled();
+
+ input.value = newName;
+
+        dispatchKeydownEvent({ key: ENTER_KEY, ctrlKey: true });
+
+ expectStoreMethodToBeCalled();
+ });
+
+ it('calls the "updateAssetLinkName" store method when Cmd+Enter is pressed inside the "Link title" input field', () => {
+ expectStoreMethodNotToBeCalled();
+
+ input.value = newName;
+
+        dispatchKeydownEvent({ key: ENTER_KEY, metaKey: true });
+
+ expectStoreMethodToBeCalled();
+ });
});
it('calls the "updateAssetLinkType" store method when an option is selected from the "Type" dropdown', () => {
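
The Ctrl+Enter/Cmd+Enter cases above dispatch a native KeyboardEvent against the raw input element instead of going through the component wrapper. A minimal standalone version of that dispatch, assuming `Enter` is the key constant behind ENTER_KEY:

it('reacts to Ctrl+Enter on a plain input', () => {
  const input = document.createElement('input');
  const onConfirm = jest.fn();

  input.addEventListener('keydown', event => {
    if (event.key === 'Enter' && (event.ctrlKey || event.metaKey)) onConfirm();
  });

  input.dispatchEvent(new KeyboardEvent('keydown', { key: 'Enter', ctrlKey: true }));

  expect(onConfirm).toHaveBeenCalledTimes(1);
});
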
diff --git a/spec/frontend/releases/components/release_block_assets_spec.js b/spec/frontend/releases/components/release_block_assets_spec.js
index a85532a8118..5e84290716c 100644
--- a/spec/frontend/releases/components/release_block_assets_spec.js
+++ b/spec/frontend/releases/components/release_block_assets_spec.js
@@ -1,10 +1,10 @@
import { mount } from '@vue/test-utils';
import { GlCollapse } from '@gitlab/ui';
+import { trimText } from 'helpers/text_helper';
+import { cloneDeep } from 'lodash';
import ReleaseBlockAssets from '~/releases/components/release_block_assets.vue';
import { ASSET_LINK_TYPE } from '~/releases/constants';
-import { trimText } from 'helpers/text_helper';
import { assets } from '../mock_data';
-import { cloneDeep } from 'lodash';
describe('Release block assets', () => {
let wrapper;
diff --git a/spec/frontend/releases/components/release_block_footer_spec.js b/spec/frontend/releases/components/release_block_footer_spec.js
index b91cfb82b65..c066bfbf020 100644
--- a/spec/frontend/releases/components/release_block_footer_spec.js
+++ b/spec/frontend/releases/components/release_block_footer_spec.js
@@ -1,11 +1,11 @@
import { mount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
import { trimText } from 'helpers/text_helper';
+import { cloneDeep } from 'lodash';
import ReleaseBlockFooter from '~/releases/components/release_block_footer.vue';
import Icon from '~/vue_shared/components/icon.vue';
import { release as originalRelease } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { cloneDeep } from 'lodash';
const mockFutureDate = new Date(9999, 0, 0).toISOString();
let mockIsFutureRelease = false;
diff --git a/spec/frontend/releases/components/release_block_metadata_spec.js b/spec/frontend/releases/components/release_block_metadata_spec.js
index cbe478bfa1f..6f184e45600 100644
--- a/spec/frontend/releases/components/release_block_metadata_spec.js
+++ b/spec/frontend/releases/components/release_block_metadata_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
+import { cloneDeep } from 'lodash';
import ReleaseBlockMetadata from '~/releases/components/release_block_metadata.vue';
import { release as originalRelease } from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import { cloneDeep } from 'lodash';
const mockFutureDate = new Date(9999, 0, 0).toISOString();
let mockIsFutureRelease = false;
diff --git a/spec/frontend/releases/components/tag_field_exsting_spec.js b/spec/frontend/releases/components/tag_field_exsting_spec.js
new file mode 100644
index 00000000000..0a04f68bd67
--- /dev/null
+++ b/spec/frontend/releases/components/tag_field_exsting_spec.js
@@ -0,0 +1,78 @@
+import { GlFormInput } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import TagFieldExisting from '~/releases/components/tag_field_existing.vue';
+import createStore from '~/releases/stores';
+import createDetailModule from '~/releases/stores/modules/detail';
+
+const TEST_TAG_NAME = 'test-tag-name';
+const TEST_DOCS_PATH = '/help/test/docs/path';
+
+describe('releases/components/tag_field_existing', () => {
+ let store;
+ let wrapper;
+
+ const createComponent = (mountFn = shallowMount) => {
+ wrapper = mountFn(TagFieldExisting, {
+ store,
+ });
+ };
+
+ const findInput = () => wrapper.find(GlFormInput);
+ const findHelp = () => wrapper.find('[data-testid="tag-name-help"]');
+ const findHelpLink = () => {
+ const link = findHelp().find('a');
+
+ return {
+ text: link.text(),
+ href: link.attributes('href'),
+ target: link.attributes('target'),
+ };
+ };
+
+ beforeEach(() => {
+ store = createStore({
+ modules: {
+ detail: createDetailModule({
+ updateReleaseApiDocsPath: TEST_DOCS_PATH,
+ tagName: TEST_TAG_NAME,
+ }),
+ },
+ });
+
+ store.state.detail.release = {
+ tagName: TEST_TAG_NAME,
+ };
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('default', () => {
+ it('shows the tag name', () => {
+ createComponent();
+
+ expect(findInput().attributes()).toMatchObject({
+ disabled: '',
+ value: TEST_TAG_NAME,
+ });
+ });
+
+ it('shows help', () => {
+ createComponent(mount);
+
+ expect(findHelp().text()).toMatchInterpolatedText(
+ 'Changing a Release tag is only supported via Releases API. More information',
+ );
+
+ const helpLink = findHelpLink();
+
+ expect(helpLink).toEqual({
+ text: 'More information',
+ href: TEST_DOCS_PATH,
+ target: '_blank',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/releases/components/tag_field_new_spec.js b/spec/frontend/releases/components/tag_field_new_spec.js
new file mode 100644
index 00000000000..b6ebc496f33
--- /dev/null
+++ b/spec/frontend/releases/components/tag_field_new_spec.js
@@ -0,0 +1,144 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import { GlFormInput } from '@gitlab/ui';
+import TagFieldNew from '~/releases/components/tag_field_new.vue';
+import createStore from '~/releases/stores';
+import createDetailModule from '~/releases/stores/modules/detail';
+import RefSelector from '~/ref/components/ref_selector.vue';
+
+const TEST_TAG_NAME = 'test-tag-name';
+const TEST_PROJECT_ID = '1234';
+const TEST_CREATE_FROM = 'test-create-from';
+
+describe('releases/components/tag_field_new', () => {
+ let store;
+ let wrapper;
+
+ const createComponent = (mountFn = shallowMount) => {
+ wrapper = mountFn(TagFieldNew, {
+ store,
+ stubs: {
+ RefSelector: true,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore({
+ modules: {
+ detail: createDetailModule({
+ projectId: TEST_PROJECT_ID,
+ }),
+ },
+ });
+
+ store.state.detail.createFrom = TEST_CREATE_FROM;
+
+ store.state.detail.release = {
+ tagName: TEST_TAG_NAME,
+ assets: {
+ links: [],
+ },
+ };
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findTagNameFormGroup = () => wrapper.find('[data-testid="tag-name-field"]');
+ const findTagNameGlInput = () => findTagNameFormGroup().find(GlFormInput);
+ const findTagNameInput = () => findTagNameFormGroup().find('input');
+
+ const findCreateFromFormGroup = () => wrapper.find('[data-testid="create-from-field"]');
+ const findCreateFromDropdown = () => findCreateFromFormGroup().find(RefSelector);
+
+ describe('"Tag name" field', () => {
+ describe('rendering and behavior', () => {
+ beforeEach(() => createComponent());
+
+ it('renders a label', () => {
+ expect(findTagNameFormGroup().attributes().label).toBe('Tag name');
+ });
+
+ describe('when the user updates the field', () => {
+ it("updates the store's release.tagName property", () => {
+ const updatedTagName = 'updated-tag-name';
+ findTagNameGlInput().vm.$emit('input', updatedTagName);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.state.detail.release.tagName).toBe(updatedTagName);
+ });
+ });
+ });
+ });
+
+ describe('validation', () => {
+ beforeEach(() => {
+ createComponent(mount);
+ });
+
+ /**
+ * Utility function to test the visibility of the validation message
+ * @param {'shown' | 'hidden'} state The expected state of the validation message.
+ * Should be passed either 'shown' or 'hidden'
+ */
+ const expectValidationMessageToBe = state => {
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findTagNameFormGroup().element).toHaveClass(
+ state === 'shown' ? 'is-invalid' : 'is-valid',
+ );
+ expect(findTagNameFormGroup().element).not.toHaveClass(
+ state === 'shown' ? 'is-valid' : 'is-invalid',
+ );
+ });
+ };
+
+ describe('when the user has not yet interacted with the component', () => {
+ it('does not display a validation error', () => {
+ findTagNameInput().setValue('');
+
+ return expectValidationMessageToBe('hidden');
+ });
+ });
+
+ describe('when the user has interacted with the component and the value is not empty', () => {
+ it('does not display validation error', () => {
+ findTagNameInput().trigger('blur');
+
+ return expectValidationMessageToBe('hidden');
+ });
+ });
+
+ describe('when the user has interacted with the component and the value is empty', () => {
+ it('displays a validation error', () => {
+ const tagNameInput = findTagNameInput();
+
+ tagNameInput.setValue('');
+ tagNameInput.trigger('blur');
+
+ return expectValidationMessageToBe('shown');
+ });
+ });
+ });
+ });
+
+ describe('"Create from" field', () => {
+ beforeEach(() => createComponent());
+
+ it('renders a label', () => {
+ expect(findCreateFromFormGroup().attributes().label).toBe('Create from');
+ });
+
+ describe('when the user selects a git ref', () => {
+ it("updates the store's createFrom property", () => {
+ const updatedCreateFrom = 'update-create-from';
+ findCreateFromDropdown().vm.$emit('input', updatedCreateFrom);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(store.state.detail.createFrom).toBe(updatedCreateFrom);
+ });
+ });
+ });
+ });
+});
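
The store-update tests in tag_field_new_spec.js follow a common shape: emit the input event, wait a tick, then read Vuex state directly. A reduced, hypothetical version of that flow (the TagInput component and `setTagName` mutation exist only for this sketch):

import Vuex from 'vuex';
import { mount, createLocalVue } from '@vue/test-utils';

const localVue = createLocalVue();
localVue.use(Vuex);

const store = new Vuex.Store({
  state: { tagName: '' },
  mutations: {
    setTagName(state, value) {
      state.tagName = value;
    },
  },
});

// Minimal component that writes its value straight to the store.
const TagInput = {
  template: `<input :value="$store.state.tagName" @input="$store.commit('setTagName', $event.target.value)" />`,
};

it('writes the field value back to the store', async () => {
  const wrapper = mount(TagInput, { store, localVue });

  wrapper.find('input').setValue('v1.2.3');
  await wrapper.vm.$nextTick();

  expect(store.state.tagName).toBe('v1.2.3');
});
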
diff --git a/spec/frontend/releases/components/tag_field_spec.js b/spec/frontend/releases/components/tag_field_spec.js
new file mode 100644
index 00000000000..c7909a2369b
--- /dev/null
+++ b/spec/frontend/releases/components/tag_field_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import TagField from '~/releases/components/tag_field.vue';
+import TagFieldNew from '~/releases/components/tag_field_new.vue';
+import TagFieldExisting from '~/releases/components/tag_field_existing.vue';
+import createStore from '~/releases/stores';
+import createDetailModule from '~/releases/stores/modules/detail';
+
+describe('releases/components/tag_field', () => {
+ let store;
+ let wrapper;
+
+ const createComponent = ({ tagName }) => {
+ store = createStore({
+ modules: {
+ detail: createDetailModule({}),
+ },
+ });
+
+ store.state.detail.tagName = tagName;
+
+ wrapper = shallowMount(TagField, { store });
+ };
+
+ const findTagFieldNew = () => wrapper.find(TagFieldNew);
+ const findTagFieldExisting = () => wrapper.find(TagFieldExisting);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('when an existing release is being edited', () => {
+ beforeEach(() => {
+ createComponent({ tagName: 'v1.0' });
+ });
+
+ it('renders the TagFieldExisting component', () => {
+ expect(findTagFieldExisting().exists()).toBe(true);
+ });
+
+ it('does not render the TagFieldNew component', () => {
+ expect(findTagFieldNew().exists()).toBe(false);
+ });
+ });
+
+ describe('when a new release is being created', () => {
+ beforeEach(() => {
+ createComponent({ tagName: null });
+ });
+
+ it('renders the TagFieldNew component', () => {
+ expect(findTagFieldNew().exists()).toBe(true);
+ });
+
+ it('does not render the TagFieldExisting component', () => {
+ expect(findTagFieldExisting().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 345be2acc71..1b2a705e8f4 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -1,18 +1,20 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import { cloneDeep, merge } from 'lodash';
+import { cloneDeep } from 'lodash';
import * as actions from '~/releases/stores/modules/detail/actions';
import * as types from '~/releases/stores/modules/detail/mutation_types';
import { release as originalRelease } from '../../../mock_data';
import createState from '~/releases/stores/modules/detail/state';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { redirectTo } from '~/lib/utils/url_utility';
import api from '~/api';
+import httpStatus from '~/lib/utils/http_status';
import { ASSET_LINK_TYPE } from '~/releases/constants';
+import { releaseToApiJson, apiJsonToRelease } from '~/releases/util';
-jest.mock('~/flash', () => jest.fn());
+jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({
redirectTo: jest.fn(),
@@ -25,15 +27,26 @@ describe('Release detail actions', () => {
let mock;
let error;
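+ // Builds the module state merged with the mocked getters; each describe block below passes only the overrides it needs.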
+ const setupState = (updates = {}) => {
+ const getters = {
+ isExistingRelease: true,
+ };
+
+ state = {
+ ...createState({
+ projectId: '18',
+ tagName: release.tag_name,
+ releasesPagePath: 'path/to/releases/page',
+ markdownDocsPath: 'path/to/markdown/docs',
+ markdownPreviewPath: 'path/to/markdown/preview',
+ updateReleaseApiDocsPath: 'path/to/api/docs',
+ }),
+ ...getters,
+ ...updates,
+ };
+ };
+
beforeEach(() => {
- state = createState({
- projectId: '18',
- tagName: 'v1.3',
- releasesPagePath: 'path/to/releases/page',
- markdownDocsPath: 'path/to/markdown/docs',
- markdownPreviewPath: 'path/to/markdown/preview',
- updateReleaseApiDocsPath: 'path/to/api/docs',
- });
release = cloneDeep(originalRelease);
mock = new MockAdapter(axios);
gon.api_version = 'v4';
@@ -45,284 +58,424 @@ describe('Release detail actions', () => {
mock.restore();
});
- describe('requestRelease', () => {
- it(`commits ${types.REQUEST_RELEASE}`, () =>
- testAction(actions.requestRelease, undefined, state, [{ type: types.REQUEST_RELEASE }]));
- });
+ describe('when creating a new release', () => {
+ beforeEach(() => {
+ setupState({ isExistingRelease: false });
+ });
- describe('receiveReleaseSuccess', () => {
- it(`commits ${types.RECEIVE_RELEASE_SUCCESS}`, () =>
- testAction(actions.receiveReleaseSuccess, release, state, [
- { type: types.RECEIVE_RELEASE_SUCCESS, payload: release },
- ]));
+ describe('initializeRelease', () => {
+ it(`commits ${types.INITIALIZE_EMPTY_RELEASE}`, () => {
+ testAction(actions.initializeRelease, undefined, state, [
+ { type: types.INITIALIZE_EMPTY_RELEASE },
+ ]);
+ });
+ });
+
+ describe('saveRelease', () => {
+ it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "createRelease"`, () => {
+ testAction(
+ actions.saveRelease,
+ undefined,
+ state,
+ [{ type: types.REQUEST_SAVE_RELEASE }],
+ [{ type: 'createRelease' }],
+ );
+ });
+ });
});
- describe('receiveReleaseError', () => {
- it(`commits ${types.RECEIVE_RELEASE_ERROR}`, () =>
- testAction(actions.receiveReleaseError, error, state, [
- { type: types.RECEIVE_RELEASE_ERROR, payload: error },
- ]));
+ describe('when editing an existing release', () => {
+ beforeEach(setupState);
- it('shows a flash with an error message', () => {
- actions.receiveReleaseError({ commit: jest.fn() }, error);
+ describe('initializeRelease', () => {
+ it('dispatches "fetchRelease"', () => {
+ testAction(actions.initializeRelease, undefined, state, [], [{ type: 'fetchRelease' }]);
+ });
+ });
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith(
- 'Something went wrong while getting the release details',
- );
+ describe('saveRelease', () => {
+ it(`commits ${types.REQUEST_SAVE_RELEASE} and then dispatched "updateRelease"`, () => {
+ testAction(
+ actions.saveRelease,
+ undefined,
+ state,
+ [{ type: types.REQUEST_SAVE_RELEASE }],
+ [{ type: 'updateRelease' }],
+ );
+ });
});
});
- describe('fetchRelease', () => {
- let getReleaseUrl;
+ describe('actions that behave the same whether creating a new release or editing an existing release', () => {
+ beforeEach(setupState);
- beforeEach(() => {
- state.projectId = '18';
- state.tagName = 'v1.3';
- getReleaseUrl = `/api/v4/projects/${state.projectId}/releases/${state.tagName}`;
- });
+ describe('fetchRelease', () => {
+ let getReleaseUrl;
+
+ beforeEach(() => {
+ getReleaseUrl = `/api/v4/projects/${state.projectId}/releases/${state.tagName}`;
+ });
+
+ describe('when the network request to the Release API is successful', () => {
+ beforeEach(() => {
+ mock.onGet(getReleaseUrl).replyOnce(httpStatus.OK, release);
+ });
+
+ it(`commits ${types.REQUEST_RELEASE} and then commits ${types.RECEIVE_RELEASE_SUCCESS} with the converted release object`, () => {
+ return testAction(actions.fetchRelease, undefined, state, [
+ {
+ type: types.REQUEST_RELEASE,
+ },
+ {
+ type: types.RECEIVE_RELEASE_SUCCESS,
+ payload: apiJsonToRelease(release, { deep: true }),
+ },
+ ]);
+ });
+ });
- it(`dispatches requestRelease and receiveReleaseSuccess with the camel-case'd release object`, () => {
- mock.onGet(getReleaseUrl).replyOnce(200, release);
-
- return testAction(
- actions.fetchRelease,
- undefined,
- state,
- [],
- [
- { type: 'requestRelease' },
- {
- type: 'receiveReleaseSuccess',
- payload: convertObjectPropsToCamelCase(release, { deep: true }),
- },
- ],
- );
+ describe('when the network request to the Release API fails', () => {
+ beforeEach(() => {
+ mock.onGet(getReleaseUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ });
+
+ it(`commits ${types.REQUEST_RELEASE} and then commits ${types.RECEIVE_RELEASE_ERROR} with an error object`, () => {
+ return testAction(actions.fetchRelease, undefined, state, [
+ {
+ type: types.REQUEST_RELEASE,
+ },
+ {
+ type: types.RECEIVE_RELEASE_ERROR,
+ payload: expect.any(Error),
+ },
+ ]);
+ });
+
+ it(`shows a flash message`, () => {
+ return actions.fetchRelease({ commit: jest.fn(), state }).then(() => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith(
+ 'Something went wrong while getting the release details',
+ );
+ });
+ });
+ });
});
- it(`dispatches requestRelease and receiveReleaseError with an error object`, () => {
- mock.onGet(getReleaseUrl).replyOnce(500);
+ describe('updateReleaseTagName', () => {
+ it(`commits ${types.UPDATE_RELEASE_TAG_NAME} with the updated tag name`, () => {
+ const newTag = 'updated-tag-name';
+ return testAction(actions.updateReleaseTagName, newTag, state, [
+ { type: types.UPDATE_RELEASE_TAG_NAME, payload: newTag },
+ ]);
+ });
+ });
- return testAction(
- actions.fetchRelease,
- undefined,
- state,
- [],
- [{ type: 'requestRelease' }, { type: 'receiveReleaseError', payload: expect.anything() }],
- );
+ describe('updateCreateFrom', () => {
+ it(`commits ${types.UPDATE_CREATE_FROM} with the updated ref`, () => {
+ const newRef = 'my-feature-branch';
+ return testAction(actions.updateCreateFrom, newRef, state, [
+ { type: types.UPDATE_CREATE_FROM, payload: newRef },
+ ]);
+ });
});
- });
- describe('updateReleaseTitle', () => {
- it(`commits ${types.UPDATE_RELEASE_TITLE} with the updated release title`, () => {
- const newTitle = 'The new release title';
- return testAction(actions.updateReleaseTitle, newTitle, state, [
- { type: types.UPDATE_RELEASE_TITLE, payload: newTitle },
- ]);
+ describe('updateReleaseTitle', () => {
+ it(`commits ${types.UPDATE_RELEASE_TITLE} with the updated release title`, () => {
+ const newTitle = 'The new release title';
+ return testAction(actions.updateReleaseTitle, newTitle, state, [
+ { type: types.UPDATE_RELEASE_TITLE, payload: newTitle },
+ ]);
+ });
});
- });
- describe('updateReleaseNotes', () => {
- it(`commits ${types.UPDATE_RELEASE_NOTES} with the updated release notes`, () => {
- const newReleaseNotes = 'The new release notes';
- return testAction(actions.updateReleaseNotes, newReleaseNotes, state, [
- { type: types.UPDATE_RELEASE_NOTES, payload: newReleaseNotes },
- ]);
+ describe('updateReleaseNotes', () => {
+ it(`commits ${types.UPDATE_RELEASE_NOTES} with the updated release notes`, () => {
+ const newReleaseNotes = 'The new release notes';
+ return testAction(actions.updateReleaseNotes, newReleaseNotes, state, [
+ { type: types.UPDATE_RELEASE_NOTES, payload: newReleaseNotes },
+ ]);
+ });
});
- });
- describe('updateAssetLinkUrl', () => {
- it(`commits ${types.UPDATE_ASSET_LINK_URL} with the updated link URL`, () => {
- const params = {
- linkIdToUpdate: 2,
- newUrl: 'https://example.com/updated',
- };
+ describe('updateReleaseMilestones', () => {
+ it(`commits ${types.UPDATE_RELEASE_MILESTONES} with the updated release milestones`, () => {
+ const newReleaseMilestones = ['v0.0', 'v0.1'];
+ return testAction(actions.updateReleaseMilestones, newReleaseMilestones, state, [
+ { type: types.UPDATE_RELEASE_MILESTONES, payload: newReleaseMilestones },
+ ]);
+ });
+ });
- return testAction(actions.updateAssetLinkUrl, params, state, [
- { type: types.UPDATE_ASSET_LINK_URL, payload: params },
- ]);
+ describe('addEmptyAssetLink', () => {
+ it(`commits ${types.ADD_EMPTY_ASSET_LINK}`, () => {
+ return testAction(actions.addEmptyAssetLink, undefined, state, [
+ { type: types.ADD_EMPTY_ASSET_LINK },
+ ]);
+ });
});
- });
- describe('updateAssetLinkName', () => {
- it(`commits ${types.UPDATE_ASSET_LINK_NAME} with the updated link name`, () => {
- const params = {
- linkIdToUpdate: 2,
- newName: 'Updated link name',
- };
+ describe('updateAssetLinkUrl', () => {
+ it(`commits ${types.UPDATE_ASSET_LINK_URL} with the updated link URL`, () => {
+ const params = {
+ linkIdToUpdate: 2,
+ newUrl: 'https://example.com/updated',
+ };
- return testAction(actions.updateAssetLinkName, params, state, [
- { type: types.UPDATE_ASSET_LINK_NAME, payload: params },
- ]);
+ return testAction(actions.updateAssetLinkUrl, params, state, [
+ { type: types.UPDATE_ASSET_LINK_URL, payload: params },
+ ]);
+ });
});
- });
- describe('updateAssetLinkType', () => {
- it(`commits ${types.UPDATE_ASSET_LINK_TYPE} with the updated link type`, () => {
- const params = {
- linkIdToUpdate: 2,
- newType: ASSET_LINK_TYPE.RUNBOOK,
- };
+ describe('updateAssetLinkName', () => {
+ it(`commits ${types.UPDATE_ASSET_LINK_NAME} with the updated link name`, () => {
+ const params = {
+ linkIdToUpdate: 2,
+ newName: 'Updated link name',
+ };
- return testAction(actions.updateAssetLinkType, params, state, [
- { type: types.UPDATE_ASSET_LINK_TYPE, payload: params },
- ]);
+ return testAction(actions.updateAssetLinkName, params, state, [
+ { type: types.UPDATE_ASSET_LINK_NAME, payload: params },
+ ]);
+ });
});
- });
- describe('removeAssetLink', () => {
- it(`commits ${types.REMOVE_ASSET_LINK} with the ID of the asset link to remove`, () => {
- const idToRemove = 2;
- return testAction(actions.removeAssetLink, idToRemove, state, [
- { type: types.REMOVE_ASSET_LINK, payload: idToRemove },
- ]);
+ describe('updateAssetLinkType', () => {
+ it(`commits ${types.UPDATE_ASSET_LINK_TYPE} with the updated link type`, () => {
+ const params = {
+ linkIdToUpdate: 2,
+ newType: ASSET_LINK_TYPE.RUNBOOK,
+ };
+
+ return testAction(actions.updateAssetLinkType, params, state, [
+ { type: types.UPDATE_ASSET_LINK_TYPE, payload: params },
+ ]);
+ });
});
- });
- describe('updateReleaseMilestones', () => {
- it(`commits ${types.UPDATE_RELEASE_MILESTONES} with the updated release milestones`, () => {
- const newReleaseMilestones = ['v0.0', 'v0.1'];
- return testAction(actions.updateReleaseMilestones, newReleaseMilestones, state, [
- { type: types.UPDATE_RELEASE_MILESTONES, payload: newReleaseMilestones },
- ]);
+ describe('removeAssetLink', () => {
+ it(`commits ${types.REMOVE_ASSET_LINK} with the ID of the asset link to remove`, () => {
+ const idToRemove = 2;
+ return testAction(actions.removeAssetLink, idToRemove, state, [
+ { type: types.REMOVE_ASSET_LINK, payload: idToRemove },
+ ]);
+ });
});
- });
- describe('requestUpdateRelease', () => {
- it(`commits ${types.REQUEST_UPDATE_RELEASE}`, () =>
- testAction(actions.requestUpdateRelease, undefined, state, [
- { type: types.REQUEST_UPDATE_RELEASE },
- ]));
- });
+ describe('receiveSaveReleaseSuccess', () => {
+ it(`commits ${types.RECEIVE_SAVE_RELEASE_SUCCESS}`, () =>
+ testAction(actions.receiveSaveReleaseSuccess, undefined, { ...state, featureFlags: {} }, [
+ { type: types.RECEIVE_SAVE_RELEASE_SUCCESS },
+ ]));
- describe('receiveUpdateReleaseSuccess', () => {
- it(`commits ${types.RECEIVE_UPDATE_RELEASE_SUCCESS}`, () =>
- testAction(actions.receiveUpdateReleaseSuccess, undefined, { ...state, featureFlags: {} }, [
- { type: types.RECEIVE_UPDATE_RELEASE_SUCCESS },
- ]));
+ describe('when the releaseShowPage feature flag is enabled', () => {
+ beforeEach(() => {
+ const rootState = { featureFlags: { releaseShowPage: true } };
+ actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state, rootState }, release);
+ });
- it('redirects to the releases page if releaseShowPage feature flag is enabled', () => {
- const rootState = { featureFlags: { releaseShowPage: true } };
- const updatedState = merge({}, state, {
- releasesPagePath: 'path/to/releases/page',
- release: {
- _links: {
- self: 'path/to/self',
- },
- },
+ it("redirects to the release's dedicated page", () => {
+ expect(redirectTo).toHaveBeenCalledTimes(1);
+ expect(redirectTo).toHaveBeenCalledWith(release._links.self);
+ });
});
- actions.receiveUpdateReleaseSuccess({ commit: jest.fn(), state: updatedState, rootState });
+ describe('when the releaseShowPage feature flag is disabled', () => {
+ beforeEach(() => {
+ const rootState = { featureFlags: { releaseShowPage: false } };
+ actions.receiveSaveReleaseSuccess({ commit: jest.fn(), state, rootState }, release);
+ });
- expect(redirectTo).toHaveBeenCalledTimes(1);
- expect(redirectTo).toHaveBeenCalledWith(updatedState.release._links.self);
+ it("redirects to the project's main Releases page", () => {
+ expect(redirectTo).toHaveBeenCalledTimes(1);
+ expect(redirectTo).toHaveBeenCalledWith(state.releasesPagePath);
+ });
+ });
});
- describe('when the releaseShowPage feature flag is disabled', () => {});
- });
-
- describe('receiveUpdateReleaseError', () => {
- it(`commits ${types.RECEIVE_UPDATE_RELEASE_ERROR}`, () =>
- testAction(actions.receiveUpdateReleaseError, error, state, [
- { type: types.RECEIVE_UPDATE_RELEASE_ERROR, payload: error },
- ]));
+ describe('createRelease', () => {
+ let createReleaseUrl;
+ let releaseLinksToCreate;
- it('shows a flash with an error message', () => {
- actions.receiveUpdateReleaseError({ commit: jest.fn() }, error);
+ beforeEach(() => {
+ const camelCasedRelease = convertObjectPropsToCamelCase(release);
- expect(createFlash).toHaveBeenCalledTimes(1);
- expect(createFlash).toHaveBeenCalledWith(
- 'Something went wrong while saving the release details',
- );
- });
- });
+ releaseLinksToCreate = camelCasedRelease.assets.links.slice(0, 1);
- describe('updateRelease', () => {
- let getters;
- let dispatch;
- let callOrder;
+ setupState({
+ release: camelCasedRelease,
+ releaseLinksToCreate,
+ });
- beforeEach(() => {
- state.release = convertObjectPropsToCamelCase(release);
- state.projectId = '18';
- state.tagName = state.release.tagName;
+ createReleaseUrl = `/api/v4/projects/${state.projectId}/releases`;
+ });
- getters = {
- releaseLinksToDelete: [{ id: '1' }, { id: '2' }],
- releaseLinksToCreate: [{ id: 'new-link-1' }, { id: 'new-link-2' }],
- };
+ describe('when the network request to the Release API is successful', () => {
+ beforeEach(() => {
+ const expectedRelease = releaseToApiJson({
+ ...state.release,
+ assets: {
+ links: releaseLinksToCreate,
+ },
+ });
- dispatch = jest.fn();
+ mock.onPost(createReleaseUrl, expectedRelease).replyOnce(httpStatus.CREATED, release);
+ });
- callOrder = [];
- jest.spyOn(api, 'updateRelease').mockImplementation(() => {
- callOrder.push('updateRelease');
- return Promise.resolve();
- });
- jest.spyOn(api, 'deleteReleaseLink').mockImplementation(() => {
- callOrder.push('deleteReleaseLink');
- return Promise.resolve();
- });
- jest.spyOn(api, 'createReleaseLink').mockImplementation(() => {
- callOrder.push('createReleaseLink');
- return Promise.resolve();
+ it(`dispatches "receiveSaveReleaseSuccess" with the converted release object`, () => {
+ return testAction(
+ actions.createRelease,
+ undefined,
+ state,
+ [],
+ [
+ {
+ type: 'receiveSaveReleaseSuccess',
+ payload: apiJsonToRelease(release, { deep: true }),
+ },
+ ],
+ );
+ });
});
- });
- it('dispatches requestUpdateRelease and receiveUpdateReleaseSuccess', () => {
- return actions.updateRelease({ dispatch, state, getters }).then(() => {
- expect(dispatch.mock.calls).toEqual([
- ['requestUpdateRelease'],
- ['receiveUpdateReleaseSuccess'],
- ]);
+ describe('when the network request to the Release API fails', () => {
+ beforeEach(() => {
+ mock.onPost(createReleaseUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ });
+
+ it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, () => {
+ return testAction(actions.createRelease, undefined, state, [
+ {
+ type: types.RECEIVE_SAVE_RELEASE_ERROR,
+ payload: expect.any(Error),
+ },
+ ]);
+ });
+
+ it(`shows a flash message`, () => {
+ return actions
+ .createRelease({ commit: jest.fn(), dispatch: jest.fn(), state, getters: {} })
+ .then(() => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith(
+ 'Something went wrong while creating a new release',
+ );
+ });
+ });
});
});
- it('dispatches requestUpdateRelease and receiveUpdateReleaseError with an error object', () => {
- jest.spyOn(api, 'updateRelease').mockRejectedValue(error);
+ describe('updateRelease', () => {
+ let getters;
+ let dispatch;
+ let commit;
+ let callOrder;
+
+ beforeEach(() => {
+ getters = {
+ releaseLinksToDelete: [{ id: '1' }, { id: '2' }],
+ releaseLinksToCreate: [{ id: 'new-link-1' }, { id: 'new-link-2' }],
+ };
+
+ setupState({
+ release: convertObjectPropsToCamelCase(release),
+ ...getters,
+ });
- return actions.updateRelease({ dispatch, state, getters }).then(() => {
- expect(dispatch.mock.calls).toEqual([
- ['requestUpdateRelease'],
- ['receiveUpdateReleaseError', error],
- ]);
+ dispatch = jest.fn();
+ commit = jest.fn();
+
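+ // Record the order of API calls so the test can assert the release is updated before links are deleted and recreated.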
+ callOrder = [];
+ jest.spyOn(api, 'updateRelease').mockImplementation(() => {
+ callOrder.push('updateRelease');
+ return Promise.resolve({ data: release });
+ });
+ jest.spyOn(api, 'deleteReleaseLink').mockImplementation(() => {
+ callOrder.push('deleteReleaseLink');
+ return Promise.resolve();
+ });
+ jest.spyOn(api, 'createReleaseLink').mockImplementation(() => {
+ callOrder.push('createReleaseLink');
+ return Promise.resolve();
+ });
});
- });
- it('updates the Release, then deletes all existing links, and then recreates new links', () => {
- return actions.updateRelease({ dispatch, state, getters }).then(() => {
- expect(callOrder).toEqual([
- 'updateRelease',
- 'deleteReleaseLink',
- 'deleteReleaseLink',
- 'createReleaseLink',
- 'createReleaseLink',
- ]);
+ describe('when the network request to the Release API is successful', () => {
+ it('dispatches receiveSaveReleaseSuccess', () => {
+ return actions.updateRelease({ commit, dispatch, state, getters }).then(() => {
+ expect(dispatch.mock.calls).toEqual([
+ ['receiveSaveReleaseSuccess', apiJsonToRelease(release)],
+ ]);
+ });
+ });
- expect(api.updateRelease.mock.calls).toEqual([
- [
- state.projectId,
- state.tagName,
- {
- name: state.release.name,
- description: state.release.description,
- milestones: state.release.milestones.map(milestone => milestone.title),
- },
- ],
- ]);
+ it('updates the Release, then deletes all existing links, and then recreates new links', () => {
+ return actions.updateRelease({ dispatch, state, getters }).then(() => {
+ expect(callOrder).toEqual([
+ 'updateRelease',
+ 'deleteReleaseLink',
+ 'deleteReleaseLink',
+ 'createReleaseLink',
+ 'createReleaseLink',
+ ]);
+
+ expect(api.updateRelease.mock.calls).toEqual([
+ [
+ state.projectId,
+ state.tagName,
+ releaseToApiJson({
+ ...state.release,
+ assets: {
+ links: getters.releaseLinksToCreate,
+ },
+ }),
+ ],
+ ]);
+
+ expect(api.deleteReleaseLink).toHaveBeenCalledTimes(
+ getters.releaseLinksToDelete.length,
+ );
+ getters.releaseLinksToDelete.forEach(link => {
+ expect(api.deleteReleaseLink).toHaveBeenCalledWith(
+ state.projectId,
+ state.tagName,
+ link.id,
+ );
+ });
+
+ expect(api.createReleaseLink).toHaveBeenCalledTimes(
+ getters.releaseLinksToCreate.length,
+ );
+ getters.releaseLinksToCreate.forEach(link => {
+ expect(api.createReleaseLink).toHaveBeenCalledWith(
+ state.projectId,
+ state.tagName,
+ link,
+ );
+ });
+ });
+ });
+ });
- expect(api.deleteReleaseLink).toHaveBeenCalledTimes(getters.releaseLinksToDelete.length);
- getters.releaseLinksToDelete.forEach(link => {
- expect(api.deleteReleaseLink).toHaveBeenCalledWith(
- state.projectId,
- state.tagName,
- link.id,
- );
+ describe('when the network request to the Release API fails', () => {
+ beforeEach(() => {
+ jest.spyOn(api, 'updateRelease').mockRejectedValue(error);
+ });
+
+ it(`commits ${types.RECEIVE_SAVE_RELEASE_ERROR} with an error object`, () => {
+ return actions.updateRelease({ commit, dispatch, state, getters }).then(() => {
+ expect(commit.mock.calls).toEqual([[types.RECEIVE_SAVE_RELEASE_ERROR, error]]);
+ });
});
- expect(api.createReleaseLink).toHaveBeenCalledTimes(getters.releaseLinksToCreate.length);
- getters.releaseLinksToCreate.forEach(link => {
- expect(api.createReleaseLink).toHaveBeenCalledWith(state.projectId, state.tagName, link);
+ it('shows a flash message', () => {
+ return actions.updateRelease({ commit, dispatch, state, getters }).then(() => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith(
+ 'Something went wrong while saving the release details',
+ );
+ });
});
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 8945ad97c93..2d9f35428f2 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -1,6 +1,20 @@
import * as getters from '~/releases/stores/modules/detail/getters';
describe('Release detail getters', () => {
+ describe('isExistingRelease', () => {
+ it('returns true if the release already exists in the database', () => {
+ const state = { tagName: 'test-tag-name' };
+
+ expect(getters.isExistingRelease(state)).toBe(true);
+ });
+
+ it('returns false if the release has not yet been saved to the database', () => {
+ const state = { tagName: null };
+
+ expect(getters.isExistingRelease(state)).toBe(false);
+ });
+ });
+
describe('releaseLinksToCreate', () => {
it("returns an empty array if state.release doesn't exist", () => {
const state = {};
@@ -62,6 +76,7 @@ describe('Release detail getters', () => {
it('returns no validation errors', () => {
const state = {
release: {
+ tagName: 'test-tag-name',
assets: {
links: [
{ id: 1, url: 'https://example.com/valid', name: 'Link 1' },
@@ -96,6 +111,9 @@ describe('Release detail getters', () => {
beforeEach(() => {
const state = {
release: {
+ // empty tag name
+ tagName: '',
+
assets: {
links: [
// Duplicate URLs
@@ -124,7 +142,15 @@ describe('Release detail getters', () => {
actualErrors = getters.validationErrors(state);
});
- it('returns a validation errors if links share a URL', () => {
+ it('returns a validation error if the tag name is empty', () => {
+ const expectedErrors = {
+ isTagNameEmpty: true,
+ };
+
+ expect(actualErrors).toMatchObject(expectedErrors);
+ });
+
+ it('returns a validation error if links share a URL', () => {
const expectedErrors = {
assets: {
links: {
@@ -182,32 +208,53 @@ describe('Release detail getters', () => {
// the value of state is not actually used by this getter
const state = {};
- it('returns true when the form is valid', () => {
- const mockGetters = {
- validationErrors: {
- assets: {
- links: {
- 1: {},
+ describe('when the form is valid', () => {
+ it('returns true', () => {
+ const mockGetters = {
+ validationErrors: {
+ assets: {
+ links: {
+ 1: {},
+ },
},
},
- },
- };
+ };
- expect(getters.isValid(state, mockGetters)).toBe(true);
+ expect(getters.isValid(state, mockGetters)).toBe(true);
+ });
});
- it('returns false when the form is invalid', () => {
- const mockGetters = {
- validationErrors: {
- assets: {
- links: {
- 1: { isNameEmpty: true },
+ describe('when an asset link contains a validation error', () => {
+ it('returns false', () => {
+ const mockGetters = {
+ validationErrors: {
+ assets: {
+ links: {
+ 1: { isNameEmpty: true },
+ },
},
},
- },
- };
+ };
- expect(getters.isValid(state, mockGetters)).toBe(false);
+ expect(getters.isValid(state, mockGetters)).toBe(false);
+ });
+ });
+
+ describe('when the tag name is empty', () => {
+ it('returns false', () => {
+ const mockGetters = {
+ validationErrors: {
+ isTagNameEmpty: true,
+ assets: {
+ links: {
+ 1: {},
+ },
+ },
+ },
+ };
+
+ expect(getters.isValid(state, mockGetters)).toBe(false);
+ });
});
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index a34c1be64d9..cd7c6b7d275 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -21,6 +21,22 @@ describe('Release detail mutations', () => {
release = convertObjectPropsToCamelCase(originalRelease);
});
+ describe(`${types.INITIALIZE_EMPTY_RELEASE}`, () => {
+ it('set state.release to an empty release object', () => {
+ mutations[types.INITIALIZE_EMPTY_RELEASE](state);
+
+ expect(state.release).toEqual({
+ tagName: null,
+ name: '',
+ description: '',
+ milestones: [],
+ assets: {
+ links: [],
+ },
+ });
+ });
+ });
+
describe(`${types.REQUEST_RELEASE}`, () => {
it('set state.isFetchingRelease to true', () => {
mutations[types.REQUEST_RELEASE](state);
@@ -56,6 +72,26 @@ describe('Release detail mutations', () => {
});
});
+ describe(`${types.UPDATE_RELEASE_TAG_NAME}`, () => {
+ it("updates the release's tag name", () => {
+ state.release = release;
+ const newTag = 'updated-tag-name';
+ mutations[types.UPDATE_RELEASE_TAG_NAME](state, newTag);
+
+ expect(state.release.tagName).toBe(newTag);
+ });
+ });
+
+ describe(`${types.UPDATE_CREATE_FROM}`, () => {
+ it('updates the ref that the tag will be created from', () => {
+ state.createFrom = 'main';
+ const newRef = 'my-feature-branch';
+ mutations[types.UPDATE_CREATE_FROM](state, newRef);
+
+ expect(state.createFrom).toBe(newRef);
+ });
+ });
+
describe(`${types.UPDATE_RELEASE_TITLE}`, () => {
it("updates the release's title", () => {
state.release = release;
@@ -76,17 +112,17 @@ describe('Release detail mutations', () => {
});
});
- describe(`${types.REQUEST_UPDATE_RELEASE}`, () => {
+ describe(`${types.REQUEST_SAVE_RELEASE}`, () => {
it('set state.isUpdatingRelease to true', () => {
- mutations[types.REQUEST_UPDATE_RELEASE](state);
+ mutations[types.REQUEST_SAVE_RELEASE](state);
expect(state.isUpdatingRelease).toBe(true);
});
});
- describe(`${types.RECEIVE_UPDATE_RELEASE_SUCCESS}`, () => {
+ describe(`${types.RECEIVE_SAVE_RELEASE_SUCCESS}`, () => {
it('handles a successful response from the server', () => {
- mutations[types.RECEIVE_UPDATE_RELEASE_SUCCESS](state, release);
+ mutations[types.RECEIVE_SAVE_RELEASE_SUCCESS](state, release);
expect(state.updateError).toBeUndefined();
@@ -94,10 +130,10 @@ describe('Release detail mutations', () => {
});
});
- describe(`${types.RECEIVE_UPDATE_RELEASE_ERROR}`, () => {
+ describe(`${types.RECEIVE_SAVE_RELEASE_ERROR}`, () => {
it('handles an unsuccessful response from the server', () => {
const error = { message: 'An error occurred!' };
- mutations[types.RECEIVE_UPDATE_RELEASE_ERROR](state, error);
+ mutations[types.RECEIVE_SAVE_RELEASE_ERROR](state, error);
expect(state.isUpdatingRelease).toBe(false);
diff --git a/spec/frontend/releases/util_spec.js b/spec/frontend/releases/util_spec.js
new file mode 100644
index 00000000000..90aa9c4c7d8
--- /dev/null
+++ b/spec/frontend/releases/util_spec.js
@@ -0,0 +1,103 @@
+import { releaseToApiJson, apiJsonToRelease } from '~/releases/util';
+
+describe('releases/util.js', () => {
+ describe('releaseToApiJson', () => {
+ it('converts a release JavaScript object into JSON that the Release API can accept', () => {
+ const release = {
+ tagName: 'tag-name',
+ name: 'Release name',
+ description: 'Release description',
+ milestones: [{ id: 1, title: '13.2' }, { id: 2, title: '13.3' }],
+ assets: {
+ links: [{ url: 'https://gitlab.example.com/link', linkType: 'other' }],
+ },
+ };
+
+ const expectedJson = {
+ tag_name: 'tag-name',
+ ref: null,
+ name: 'Release name',
+ description: 'Release description',
+ milestones: ['13.2', '13.3'],
+ assets: {
+ links: [{ url: 'https://gitlab.example.com/link', link_type: 'other' }],
+ },
+ };
+
+ expect(releaseToApiJson(release)).toEqual(expectedJson);
+ });
+
+ describe('when createFrom is provided', () => {
+ it('adds the provided createFrom ref to the JSON as a "ref" property', () => {
+ const createFrom = 'main';
+
+ const release = {};
+
+ const expectedJson = {
+ ref: createFrom,
+ };
+
+ expect(releaseToApiJson(release, createFrom)).toMatchObject(expectedJson);
+ });
+ });
+
+ describe('release.name', () => {
+ it.each`
+ input | output
+ ${null} | ${null}
+ ${''} | ${null}
+ ${' \t\n\r\n'} | ${null}
+ ${' Release name '} | ${'Release name'}
+ `('converts a name like `$input` to `$output`', ({ input, output }) => {
+ const release = { name: input };
+
+ const expectedJson = {
+ name: output,
+ };
+
+ expect(releaseToApiJson(release)).toMatchObject(expectedJson);
+ });
+ });
+
+ describe('when release.milestones is falsy', () => {
+ it('includes a "milestone" property in the returned result as an empty array', () => {
+ const release = {};
+
+ const expectedJson = {
+ milestones: [],
+ };
+
+ expect(releaseToApiJson(release)).toMatchObject(expectedJson);
+ });
+ });
+ });
+
+ describe('apiJsonToRelease', () => {
+ it('converts JSON received from the Release API into an object usable by the Vue application', () => {
+ const json = {
+ tag_name: 'tag-name',
+ assets: {
+ links: [
+ {
+ link_type: 'other',
+ },
+ ],
+ },
+ };
+
+ const expectedRelease = {
+ tagName: 'tag-name',
+ assets: {
+ links: [
+ {
+ linkType: 'other',
+ },
+ ],
+ },
+ milestones: [],
+ };
+
+ expect(apiJsonToRelease(json)).toEqual(expectedRelease);
+ });
+ });
+});
diff --git a/spec/frontend/reports/accessibility_report/mock_data.js b/spec/frontend/reports/accessibility_report/mock_data.js
index f8e832c1ce5..20ad01bd802 100644
--- a/spec/frontend/reports/accessibility_report/mock_data.js
+++ b/spec/frontend/reports/accessibility_report/mock_data.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line import/prefer-default-export
export const mockReport = {
status: 'failed',
summary: {
@@ -51,5 +52,3 @@ export const mockReport = {
existing_notes: [],
existing_warnings: [],
};
-
-export default () => {};
diff --git a/spec/frontend/reports/accessibility_report/store/actions_spec.js b/spec/frontend/reports/accessibility_report/store/actions_spec.js
index 129a5bade86..9f210659cfd 100644
--- a/spec/frontend/reports/accessibility_report/store/actions_spec.js
+++ b/spec/frontend/reports/accessibility_report/store/actions_spec.js
@@ -1,10 +1,10 @@
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
import * as actions from '~/reports/accessibility_report/store/actions';
import * as types from '~/reports/accessibility_report/store/mutation_types';
import createStore from '~/reports/accessibility_report/store';
-import { TEST_HOST } from 'spec/test_constants';
-import testAction from 'helpers/vuex_action_helper';
import { mockReport } from '../mock_data';
describe('Accessibility Reports actions', () => {
diff --git a/spec/frontend/reports/codequality_report/store/actions_spec.js b/spec/frontend/reports/codequality_report/store/actions_spec.js
index 6c30fdb7871..7d9e4bbbe9f 100644
--- a/spec/frontend/reports/codequality_report/store/actions_spec.js
+++ b/spec/frontend/reports/codequality_report/store/actions_spec.js
@@ -1,10 +1,10 @@
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'spec/test_constants';
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
import * as actions from '~/reports/codequality_report/store/actions';
import * as types from '~/reports/codequality_report/store/mutation_types';
import createStore from '~/reports/codequality_report/store';
-import { TEST_HOST } from 'spec/test_constants';
-import testAction from 'helpers/vuex_action_helper';
import { headIssues, baseIssues, mockParsedHeadIssues, mockParsedBaseIssues } from '../mock_data';
// mock codequality comparison worker
diff --git a/spec/frontend/reports/components/grouped_test_reports_app_spec.js b/spec/frontend/reports/components/grouped_test_reports_app_spec.js
index 017e0335569..c26e2fbc19a 100644
--- a/spec/frontend/reports/components/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/components/grouped_test_reports_app_spec.js
@@ -20,10 +20,7 @@ describe('Grouped test reports app', () => {
let wrapper;
let mockStore;
- const mountComponent = ({
- glFeatures = { junitPipelineView: false },
- props = { pipelinePath },
- } = {}) => {
+ const mountComponent = ({ props = { pipelinePath } } = {}) => {
wrapper = mount(Component, {
store: mockStore,
localVue,
@@ -35,9 +32,6 @@ describe('Grouped test reports app', () => {
methods: {
fetchReports: () => {},
},
- provide: {
- glFeatures,
- },
});
};
@@ -78,28 +72,17 @@ describe('Grouped test reports app', () => {
});
describe('`View full report` button', () => {
- it('should not render the full test report link', () => {
- expect(findFullTestReportLink().exists()).toBe(false);
- });
+ it('should render the full test report link', () => {
+ const fullTestReportLink = findFullTestReportLink();
- describe('With junitPipelineView feature flag enabled', () => {
- beforeEach(() => {
- mountComponent({ glFeatures: { junitPipelineView: true } });
- });
-
- it('should render the full test report link', () => {
- const fullTestReportLink = findFullTestReportLink();
-
- expect(fullTestReportLink.exists()).toBe(true);
- expect(pipelinePath).not.toBe('');
- expect(fullTestReportLink.attributes('href')).toBe(`${pipelinePath}/test_report`);
- });
+ expect(fullTestReportLink.exists()).toBe(true);
+ expect(pipelinePath).not.toBe('');
+ expect(fullTestReportLink.attributes('href')).toBe(`${pipelinePath}/test_report`);
});
describe('Without a pipelinePath', () => {
beforeEach(() => {
mountComponent({
- glFeatures: { junitPipelineView: true },
props: { pipelinePath: '' },
});
});
diff --git a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
index 1dca65dd862..cf2e6b00800 100644
--- a/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/last_commit_spec.js.snap
@@ -10,7 +10,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
imgcssclasses=""
imgsize="40"
imgsrc="https://test.com"
- linkhref="https://test.com/test"
+ linkhref="/test"
tooltipplacement="top"
tooltiptext=""
username=""
@@ -24,7 +24,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
>
<gl-link-stub
class="commit-row-message item-title"
- href="https://test.com/commit/123"
+ href="/commit/123"
>
Commit title
</gl-link-stub>
@@ -36,7 +36,7 @@ exports[`Repository last commit component renders commit widget 1`] = `
>
<gl-link-stub
class="commit-author-link js-user-link"
- href="https://test.com/test"
+ href="/test"
>
Test
@@ -110,7 +110,7 @@ exports[`Repository last commit component renders the signature HTML as returned
imgcssclasses=""
imgsize="40"
imgsrc="https://test.com"
- linkhref="https://test.com/test"
+ linkhref="/test"
tooltipplacement="top"
tooltiptext=""
username=""
@@ -124,7 +124,7 @@ exports[`Repository last commit component renders the signature HTML as returned
>
<gl-link-stub
class="commit-row-message item-title"
- href="https://test.com/commit/123"
+ href="/commit/123"
>
Commit title
</gl-link-stub>
@@ -136,7 +136,7 @@ exports[`Repository last commit component renders the signature HTML as returned
>
<gl-link-stub
class="commit-author-link js-user-link"
- href="https://test.com/test"
+ href="/test"
>
Test
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 38e5c9aaca5..ca4120576f5 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -1,5 +1,5 @@
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
-import { GlDropdown } from '@gitlab/ui';
+import { GlDeprecatedDropdown } from '@gitlab/ui';
import Breadcrumbs from '~/repository/components/breadcrumbs.vue';
let vm;
@@ -61,7 +61,7 @@ describe('Repository breadcrumbs component', () => {
vm.setData({ userPermissions: { forkProject: false, createMergeRequestIn: false } });
return vm.vm.$nextTick(() => {
- expect(vm.find(GlDropdown).exists()).toBe(false);
+ expect(vm.find(GlDeprecatedDropdown).exists()).toBe(false);
});
});
@@ -71,7 +71,7 @@ describe('Repository breadcrumbs component', () => {
vm.setData({ userPermissions: { forkProject: true, createMergeRequestIn: true } });
return vm.vm.$nextTick(() => {
- expect(vm.find(GlDropdown).exists()).toBe(true);
+ expect(vm.find(GlDeprecatedDropdown).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/repository/components/last_commit_spec.js b/spec/frontend/repository/components/last_commit_spec.js
index a5bfeb08fe4..c14a7f0e061 100644
--- a/spec/frontend/repository/components/last_commit_spec.js
+++ b/spec/frontend/repository/components/last_commit_spec.js
@@ -11,12 +11,12 @@ function createCommitData(data = {}) {
title: 'Commit title',
titleHtml: 'Commit title',
message: 'Commit message',
- webUrl: 'https://test.com/commit/123',
+ webPath: '/commit/123',
authoredDate: '2019-01-01',
author: {
name: 'Test',
avatarUrl: 'https://test.com',
- webUrl: 'https://test.com/test',
+ webPath: '/test',
},
pipeline: {
detailedStatus: {
@@ -108,7 +108,7 @@ describe('Repository last commit component', () => {
});
it('does not render description expander when description is null', () => {
- factory(createCommitData({ description: null }));
+ factory(createCommitData({ descriptionHtml: null }));
return vm.vm.$nextTick(() => {
expect(vm.find('.text-expander').exists()).toBe(false);
@@ -117,7 +117,7 @@ describe('Repository last commit component', () => {
});
it('expands commit description when clicking expander', () => {
- factory(createCommitData({ description: 'Test description' }));
+ factory(createCommitData({ descriptionHtml: 'Test description' }));
return vm.vm
.$nextTick()
diff --git a/spec/frontend/repository/components/preview/index_spec.js b/spec/frontend/repository/components/preview/index_spec.js
index 6ae323f5c3f..ebd985e640c 100644
--- a/spec/frontend/repository/components/preview/index_spec.js
+++ b/spec/frontend/repository/components/preview/index_spec.js
@@ -30,7 +30,7 @@ describe('Repository file preview component', () => {
it('renders file HTML', () => {
factory({
- webUrl: 'http://test.com',
+ webPath: 'http://test.com',
name: 'README.md',
});
@@ -43,7 +43,7 @@ describe('Repository file preview component', () => {
it('handles hash after render', () => {
factory({
- webUrl: 'http://test.com',
+ webPath: 'http://test.com',
name: 'README.md',
});
@@ -59,7 +59,7 @@ describe('Repository file preview component', () => {
it('renders loading icon', () => {
factory({
- webUrl: 'http://test.com',
+ webPath: 'http://test.com',
name: 'README.md',
});
diff --git a/spec/frontend/repository/components/table/index_spec.js b/spec/frontend/repository/components/table/index_spec.js
index ed50f292b8c..10669330b61 100644
--- a/spec/frontend/repository/components/table/index_spec.js
+++ b/spec/frontend/repository/components/table/index_spec.js
@@ -13,7 +13,7 @@ const MOCK_BLOBS = [
flatPath: 'blob',
name: 'blob.md',
type: 'blob',
- webUrl: 'http://test.com',
+ webPath: '/blob',
},
{
id: '124abc',
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index da892ce51d8..ea85cd34743 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import TreeContent from '~/repository/components/tree_content.vue';
+import { GlButton } from '@gitlab/ui';
+import TreeContent, { INITIAL_FETCH_COUNT } from '~/repository/components/tree_content.vue';
import FilePreview from '~/repository/components/preview/index.vue';
let vm;
@@ -25,14 +26,24 @@ describe('Repository table component', () => {
vm.destroy();
});
- it('renders file preview', () => {
+ it('renders file preview', async () => {
factory('/');
vm.setData({ entries: { blobs: [{ name: 'README.md' }] } });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(FilePreview).exists()).toBe(true);
- });
+ await vm.vm.$nextTick();
+
+ expect(vm.find(FilePreview).exists()).toBe(true);
+ });
+
+ it('triggers fetchFiles when mounted', async () => {
+ factory('/');
+
+ jest.spyOn(vm.vm, 'fetchFiles').mockImplementation(() => {});
+
+ await vm.vm.$nextTick();
+
+ expect(vm.vm.fetchFiles).toHaveBeenCalled();
});
describe('normalizeData', () => {
@@ -70,4 +81,59 @@ describe('Repository table component', () => {
expect(output).toEqual({ hasNextPage: true, nextCursor: 'test' });
});
});
+
+ describe('Show more button', () => {
+ const showMoreButton = () => vm.find(GlButton);
+
+ describe('when present', () => {
+ beforeEach(async () => {
+ factory('/');
+
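+ // A fetchCounter of 10 mimics a repository that hit the initial fetch limit, so the "Show more" button should render.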
+ vm.setData({ fetchCounter: 10, clickedShowMore: false });
+
+ await vm.vm.$nextTick();
+ });
+
+ it('is not rendered once it is clicked', async () => {
+ showMoreButton().vm.$emit('click');
+ await vm.vm.$nextTick();
+
+ expect(showMoreButton().exists()).toBe(false);
+ });
+
+ it('is rendered', async () => {
+ expect(showMoreButton().exists()).toBe(true);
+ });
+
+ it('changes clickedShowMore when show more button is clicked', async () => {
+ showMoreButton().vm.$emit('click');
+
+ expect(vm.vm.clickedShowMore).toBe(true);
+ });
+
+ it('triggers fetchFiles when show more button is clicked', async () => {
+ jest.spyOn(vm.vm, 'fetchFiles');
+
+ showMoreButton().vm.$emit('click');
+
+ expect(vm.vm.fetchFiles).toBeCalled();
+ });
+ });
+
+ it('is not rendered if there are fewer than 1000 files', async () => {
+ factory('/');
+
+ vm.setData({ fetchCounter: 5, clickedShowMore: false });
+
+ await vm.vm.$nextTick();
+
+ expect(showMoreButton().exists()).toBe(false);
+ });
+
+ it('has limit of 1000 files on initial load', () => {
+ factory('/');
+
+ expect(INITIAL_FETCH_COUNT * vm.vm.pageSize).toBe(1000);
+ });
+ });
});
diff --git a/spec/frontend/repository/components/web_ide_link_spec.js b/spec/frontend/repository/components/web_ide_link_spec.js
index 59e1a4fd719..877756db364 100644
--- a/spec/frontend/repository/components/web_ide_link_spec.js
+++ b/spec/frontend/repository/components/web_ide_link_spec.js
@@ -1,5 +1,5 @@
-import WebIdeLink from '~/repository/components/web_ide_link.vue';
import { mount } from '@vue/test-utils';
+import WebIdeLink from '~/repository/components/web_ide_link.vue';
describe('Web IDE link component', () => {
let wrapper;
diff --git a/spec/frontend/repository/utils/dom_spec.js b/spec/frontend/repository/utils/dom_spec.js
index e8b0565868e..26ed57f0392 100644
--- a/spec/frontend/repository/utils/dom_spec.js
+++ b/spec/frontend/repository/utils/dom_spec.js
@@ -1,6 +1,6 @@
+import { TEST_HOST } from 'helpers/test_constants';
import { setHTMLFixture } from '../../helpers/fixtures';
import { updateElementsVisibility, updateFormAction } from '~/repository/utils/dom';
-import { TEST_HOST } from 'helpers/test_constants';
describe('updateElementsVisibility', () => {
it('adds hidden class', () => {
diff --git a/spec/frontend/search_autocomplete_spec.js b/spec/frontend/search_autocomplete_spec.js
index 05b36474548..ee46dc015af 100644
--- a/spec/frontend/search_autocomplete_spec.js
+++ b/spec/frontend/search_autocomplete_spec.js
@@ -2,10 +2,11 @@
import $ from 'jquery';
import '~/gl_dropdown';
+import AxiosMockAdapter from 'axios-mock-adapter';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import initSearchAutocomplete from '~/search_autocomplete';
import '~/lib/utils/common_utils';
import axios from '~/lib/utils/axios_utils';
-import AxiosMockAdapter from 'axios-mock-adapter';
describe('Search autocomplete dropdown', () => {
let widget = null;
@@ -274,11 +275,32 @@ describe('Search autocomplete dropdown', () => {
});
describe('enableAutocomplete', () => {
+ let toggleSpy;
+ let trackingSpy;
+
+ beforeEach(() => {
+ toggleSpy = jest.spyOn(widget.dropdownToggle, 'dropdown');
+ trackingSpy = mockTracking('_category_', undefined, jest.spyOn);
+ document.body.dataset.page = 'some:page'; // default tracking for category
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
it('should open the Dropdown', () => {
- const toggleSpy = jest.spyOn(widget.dropdownToggle, 'dropdown');
widget.enableAutocomplete();
expect(toggleSpy).toHaveBeenCalledWith('toggle');
});
+
+ it('should track the opening', () => {
+ widget.enableAutocomplete();
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_search_bar', {
+ label: 'main_navigation',
+ property: 'navigation',
+ });
+ });
});
});
diff --git a/spec/frontend/self_monitor/components/self_monitor_form_spec.js b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
index aa6f71b6412..ec5f7b0a394 100644
--- a/spec/frontend/self_monitor/components/self_monitor_form_spec.js
+++ b/spec/frontend/self_monitor/components/self_monitor_form_spec.js
@@ -1,8 +1,8 @@
import { shallowMount } from '@vue/test-utils';
import { GlDeprecatedButton } from '@gitlab/ui';
+import { TEST_HOST } from 'helpers/test_constants';
import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
import { createStore } from '~/self_monitor/store';
-import { TEST_HOST } from 'helpers/test_constants';
describe('self monitor component', () => {
let wrapper;
diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
new file mode 100644
index 00000000000..22689080063
--- /dev/null
+++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
@@ -0,0 +1,20 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`EmptyStateComponent should render content 1`] = `
+"<section class=\\"row empty-state text-center\\">
+ <div class=\\"col-12\\">
+ <div class=\\"svg-250 svg-content\\"><img src=\\"/image.svg\\" alt=\\"Getting started with serverless\\" class=\\"gl-max-w-full\\"></div>
+ </div>
+ <div class=\\"col-12\\">
+ <div class=\\"text-content gl-mx-auto gl-my-0 gl-p-5\\">
+ <h1 class=\\"h4\\">Getting started with serverless</h1>
+ <p>In order to start using functions as a service, you must first install Knative on your Kubernetes cluster. <gl-link-stub href=\\"/help\\">More information</gl-link-stub>
+ </p>
+ <div>
+ <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" href=\\"/clusters\\">Install Knative</gl-button-stub>
+ <!---->
+ </div>
+ </div>
+ </div>
+</section>"
+`;
diff --git a/spec/frontend/serverless/components/empty_state_spec.js b/spec/frontend/serverless/components/empty_state_spec.js
new file mode 100644
index 00000000000..daa1576a4ec
--- /dev/null
+++ b/spec/frontend/serverless/components/empty_state_spec.js
@@ -0,0 +1,25 @@
+import { GlEmptyState, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createStore } from '~/serverless/store';
+import EmptyStateComponent from '~/serverless/components/empty_state.vue';
+
+describe('EmptyStateComponent', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ const store = createStore({
+ clustersPath: '/clusters',
+ helpPath: '/help',
+ emptyImagePath: '/image.svg',
+ });
+ wrapper = shallowMount(EmptyStateComponent, { store, stubs: { GlEmptyState, GlSprintf } });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should render content', () => {
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/serverless/components/function_details_spec.js b/spec/frontend/serverless/components/function_details_spec.js
index 40d2bbb0291..248dd418941 100644
--- a/spec/frontend/serverless/components/function_details_spec.js
+++ b/spec/frontend/serverless/components/function_details_spec.js
@@ -13,7 +13,7 @@ describe('functionDetailsComponent', () => {
localVue = createLocalVue();
localVue.use(Vuex);
- store = createStore();
+ store = createStore({ clustersPath: '/clusters', helpPath: '/help' });
});
afterEach(() => {
@@ -38,8 +38,6 @@ describe('functionDetailsComponent', () => {
propsData: {
func: serviceStub,
hasPrometheus: false,
- clustersPath: '/clusters',
- helpPath: '/help',
},
});
@@ -65,8 +63,6 @@ describe('functionDetailsComponent', () => {
propsData: {
func: serviceStub,
hasPrometheus: false,
- clustersPath: '/clusters',
- helpPath: '/help',
},
});
@@ -82,8 +78,6 @@ describe('functionDetailsComponent', () => {
propsData: {
func: serviceStub,
hasPrometheus: false,
- clustersPath: '/clusters',
- helpPath: '/help',
},
});
@@ -99,8 +93,6 @@ describe('functionDetailsComponent', () => {
propsData: {
func: serviceStub,
hasPrometheus: false,
- clustersPath: '/clusters',
- helpPath: '/help',
},
});
diff --git a/spec/frontend/serverless/components/functions_spec.js b/spec/frontend/serverless/components/functions_spec.js
index 8db04409357..0fca027fe56 100644
--- a/spec/frontend/serverless/components/functions_spec.js
+++ b/spec/frontend/serverless/components/functions_spec.js
@@ -25,55 +25,31 @@ describe('functionsComponent', () => {
localVue = createLocalVue();
localVue.use(Vuex);
- store = createStore();
+ store = createStore({});
});
afterEach(() => {
- component.vm.$destroy();
+ component.destroy();
axiosMock.restore();
});
it('should render empty state when Knative is not installed', () => {
store.dispatch('receiveFunctionsSuccess', { knative_installed: false });
- component = shallowMount(functionsComponent, {
- localVue,
- store,
- propsData: {
- clustersPath: '',
- helpPath: '',
- statusPath: '',
- },
- });
+ component = shallowMount(functionsComponent, { localVue, store });
expect(component.find(EmptyState).exists()).toBe(true);
});
it('should render a loading component', () => {
store.dispatch('requestFunctionsLoading');
- component = shallowMount(functionsComponent, {
- localVue,
- store,
- propsData: {
- clustersPath: '',
- helpPath: '',
- statusPath: '',
- },
- });
+ component = shallowMount(functionsComponent, { localVue, store });
expect(component.find(GlLoadingIcon).exists()).toBe(true);
});
it('should render empty state when there is no function data', () => {
store.dispatch('receiveFunctionsNoDataSuccess', { knative_installed: true });
- component = shallowMount(functionsComponent, {
- localVue,
- store,
- propsData: {
- clustersPath: '',
- helpPath: '',
- statusPath: '',
- },
- });
+ component = shallowMount(functionsComponent, { localVue, store });
expect(
component.vm.$el
@@ -91,30 +67,17 @@ describe('functionsComponent', () => {
...mockServerlessFunctions,
knative_installed: 'checking',
});
- component = shallowMount(functionsComponent, {
- localVue,
- store,
- propsData: {
- clustersPath: '',
- helpPath: '',
- statusPath: '',
- },
- });
+
+ component = shallowMount(functionsComponent, { localVue, store });
expect(component.find('.js-functions-wrapper').exists()).toBe(true);
expect(component.find('.js-functions-loader').exists()).toBe(true);
});
it('should render the functions list', () => {
- component = shallowMount(functionsComponent, {
- localVue,
- store,
- propsData: {
- clustersPath: 'clustersPath',
- helpPath: 'helpPath',
- statusPath,
- },
- });
+ store = createStore({ clustersPath: 'clustersPath', helpPath: 'helpPath', statusPath });
+
+ component = shallowMount(functionsComponent, { localVue, store });
component.vm.$store.dispatch('receiveFunctionsSuccess', mockServerlessFunctions);
diff --git a/spec/frontend/serverless/components/missing_prometheus_spec.js b/spec/frontend/serverless/components/missing_prometheus_spec.js
index 90730765f7c..9ca4a45dd5f 100644
--- a/spec/frontend/serverless/components/missing_prometheus_spec.js
+++ b/spec/frontend/serverless/components/missing_prometheus_spec.js
@@ -1,25 +1,23 @@
import { GlDeprecatedButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { createStore } from '~/serverless/store';
import missingPrometheusComponent from '~/serverless/components/missing_prometheus.vue';
-const createComponent = missingData =>
- shallowMount(missingPrometheusComponent, {
- propsData: {
- clustersPath: '/clusters',
- helpPath: '/help',
- missingData,
- },
- });
-
describe('missingPrometheusComponent', () => {
let wrapper;
+ const createComponent = missingData => {
+ const store = createStore({ clustersPath: '/clusters', helpPath: '/help' });
+
+ wrapper = shallowMount(missingPrometheusComponent, { store, propsData: { missingData } });
+ };
+
afterEach(() => {
wrapper.destroy();
});
it('should render missing prometheus message', () => {
- wrapper = createComponent(false);
+ createComponent(false);
const { vm } = wrapper;
expect(vm.$el.querySelector('.state-description').innerHTML.trim()).toContain(
@@ -30,7 +28,7 @@ describe('missingPrometheusComponent', () => {
});
it('should render no prometheus data message', () => {
- wrapper = createComponent(true);
+ createComponent(true);
const { vm } = wrapper;
expect(vm.$el.querySelector('.state-description').innerHTML.trim()).toContain(
diff --git a/spec/frontend/serverless/survey_banner_spec.js b/spec/frontend/serverless/survey_banner_spec.js
index 15e9c6ec350..29b36fb9b5f 100644
--- a/spec/frontend/serverless/survey_banner_spec.js
+++ b/spec/frontend/serverless/survey_banner_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import Cookies from 'js-cookie';
-import SurveyBanner from '~/serverless/survey_banner.vue';
import { GlBanner } from '@gitlab/ui';
+import SurveyBanner from '~/serverless/survey_banner.vue';
describe('Knative survey banner', () => {
let wrapper;
diff --git a/spec/frontend/serverless/utils.js b/spec/frontend/serverless/utils.js
index 5ce2e37d493..ba451b7d573 100644
--- a/spec/frontend/serverless/utils.js
+++ b/spec/frontend/serverless/utils.js
@@ -1,3 +1,4 @@
+// eslint-disable-next-line import/prefer-default-export
export const adjustMetricQuery = data => {
const updatedMetric = data.metrics;
@@ -15,6 +16,3 @@ export const adjustMetricQuery = data => {
updatedMetric.queries = queries;
return updatedMetric;
};
-
-// prevent babel-plugin-rewire from generating an invalid default during karma tests
-export default () => {};
diff --git a/spec/frontend/sidebar/__snapshots__/confidential_issue_sidebar_spec.js.snap b/spec/frontend/sidebar/__snapshots__/confidential_issue_sidebar_spec.js.snap
index da571af3a0d..4c1ab4a499c 100644
--- a/spec/frontend/sidebar/__snapshots__/confidential_issue_sidebar_spec.js.snap
+++ b/spec/frontend/sidebar/__snapshots__/confidential_issue_sidebar_spec.js.snap
@@ -49,8 +49,6 @@ exports[`Confidential Issue Sidebar Block renders for confidential = false and i
</div>
</div>
-
- <!---->
</div>
`;
@@ -111,8 +109,6 @@ exports[`Confidential Issue Sidebar Block renders for confidential = false and i
</div>
</div>
-
- <!---->
</div>
`;
@@ -164,8 +160,6 @@ exports[`Confidential Issue Sidebar Block renders for confidential = true and is
</div>
</div>
-
- <!---->
</div>
`;
@@ -225,7 +219,5 @@ exports[`Confidential Issue Sidebar Block renders for confidential = true and is
</div>
</div>
-
- <!---->
</div>
`;
diff --git a/spec/frontend/sidebar/confidential/__snapshots__/edit_form_spec.js.snap b/spec/frontend/sidebar/confidential/__snapshots__/edit_form_spec.js.snap
new file mode 100644
index 00000000000..d33f6c7f389
--- /dev/null
+++ b/spec/frontend/sidebar/confidential/__snapshots__/edit_form_spec.js.snap
@@ -0,0 +1,50 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Edit Form Dropdown when confidential renders on or off text based on confidentiality 1`] = `
+<div
+ class="dropdown show"
+ toggleform="function () {}"
+ updateconfidentialattribute="function () {}"
+>
+ <div
+ class="dropdown-menu sidebar-item-warning-message"
+ >
+ <div>
+ <p>
+ <gl-sprintf-stub
+ message="You are going to turn off the confidentiality. This means %{strongStart}everyone%{strongEnd} will be able to see and leave a comment on this %{issuableType}."
+ />
+ </p>
+
+ <edit-form-buttons-stub
+ confidential="true"
+ fullpath=""
+ />
+ </div>
+ </div>
+</div>
+`;
+
+exports[`Edit Form Dropdown when not confidential renders "You are going to turn on the confidentiality." in the 1`] = `
+<div
+ class="dropdown show"
+ toggleform="function () {}"
+ updateconfidentialattribute="function () {}"
+>
+ <div
+ class="dropdown-menu sidebar-item-warning-message"
+ >
+ <div>
+ <p>
+ <gl-sprintf-stub
+ message="You are going to turn on the confidentiality. This means that only team members with %{strongStart}at least Reporter access%{strongEnd} are able to see and leave comments on the %{issuableType}."
+ />
+ </p>
+
+ <edit-form-buttons-stub
+ fullpath=""
+ />
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js b/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
index 15493d3087f..2f11c6a07c2 100644
--- a/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/confidential/edit_form_buttons_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { GlLoadingIcon } from '@gitlab/ui';
+import waitForPromises from 'helpers/wait_for_promises';
import EditFormButtons from '~/sidebar/components/confidential/edit_form_buttons.vue';
import eventHub from '~/sidebar/event_hub';
import createStore from '~/notes/stores';
-import waitForPromises from 'helpers/wait_for_promises';
-import flash from '~/flash';
+import { deprecatedCreateFlash as flash } from '~/flash';
jest.mock('~/sidebar/event_hub', () => ({ $emit: jest.fn() }));
jest.mock('~/flash');
@@ -14,12 +14,7 @@ describe('Edit Form Buttons', () => {
let store;
const findConfidentialToggle = () => wrapper.find('[data-testid="confidential-toggle"]');
- const createComponent = ({
- props = {},
- data = {},
- confidentialApolloSidebar = false,
- resolved = true,
- }) => {
+ const createComponent = ({ props = {}, data = {}, resolved = true }) => {
store = createStore();
if (resolved) {
jest.spyOn(store, 'dispatch').mockResolvedValue();
@@ -38,11 +33,6 @@ describe('Edit Form Buttons', () => {
...data,
};
},
- provide: {
- glFeatures: {
- confidentialApolloSidebar,
- },
- },
store,
});
};
@@ -54,9 +44,11 @@ describe('Edit Form Buttons', () => {
describe('when isLoading', () => {
beforeEach(() => {
- createComponent({});
-
- wrapper.vm.$store.state.noteableData.confidential = false;
+ createComponent({
+ props: {
+ confidential: false,
+ },
+ });
});
it('renders "Applying" in the toggle button', () => {
@@ -78,6 +70,9 @@ describe('Edit Form Buttons', () => {
data: {
isLoading: false,
},
+ props: {
+ confidential: false,
+ },
});
expect(findConfidentialToggle().text()).toBe('Turn On');
@@ -90,70 +85,63 @@ describe('Edit Form Buttons', () => {
data: {
isLoading: false,
},
+ props: {
+ confidential: true,
+ },
});
-
- wrapper.vm.$store.state.noteableData.confidential = true;
});
it('renders on or off text based on confidentiality', () => {
expect(findConfidentialToggle().text()).toBe('Turn Off');
});
-
- describe('when clicking on the confidential toggle', () => {
- it('emits updateConfidentialAttribute', () => {
- findConfidentialToggle().trigger('click');
-
- expect(eventHub.$emit).toHaveBeenCalledWith('updateConfidentialAttribute');
- });
- });
});
- describe('when confidentialApolloSidebar is turned on', () => {
- const isConfidential = true;
+ describe('when succeeds', () => {
+ beforeEach(() => {
+ createComponent({ data: { isLoading: false }, props: { confidential: true } });
+ findConfidentialToggle().trigger('click');
+ });
- describe('when succeeds', () => {
- beforeEach(() => {
- createComponent({ data: { isLoading: false }, confidentialApolloSidebar: true });
- wrapper.vm.$store.state.noteableData.confidential = isConfidential;
- findConfidentialToggle().trigger('click');
+ it('dispatches the correct action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('updateConfidentialityOnIssuable', {
+ confidential: false,
+ fullPath: '',
});
+ });
- it('dispatches the correct action', () => {
- expect(store.dispatch).toHaveBeenCalledWith('updateConfidentialityOnIssue', {
- confidential: !isConfidential,
- fullPath: '',
- });
+ it('resets loading', () => {
+ return waitForPromises().then(() => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
});
+ });
- it('resets loading', () => {
- return waitForPromises().then(() => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- });
+ it('emits close form', () => {
+ return waitForPromises().then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('closeConfidentialityForm');
});
+ });
- it('emits close form', () => {
- return waitForPromises().then(() => {
- expect(eventHub.$emit).toHaveBeenCalledWith('closeConfidentialityForm');
- });
+ it('emits updateOnConfidentiality event', () => {
+ return waitForPromises().then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('updateIssuableConfidentiality', false);
});
});
+ });
- describe('when fails', () => {
- beforeEach(() => {
- createComponent({
- data: { isLoading: false },
- confidentialApolloSidebar: true,
- resolved: false,
- });
- wrapper.vm.$store.state.noteableData.confidential = isConfidential;
- findConfidentialToggle().trigger('click');
+ describe('when fails', () => {
+ beforeEach(() => {
+ createComponent({
+ data: { isLoading: false },
+ props: { confidential: true },
+ resolved: false,
});
+ findConfidentialToggle().trigger('click');
+ });
- it('calls flash with the correct message', () => {
- expect(flash).toHaveBeenCalledWith(
- 'Something went wrong trying to change the confidentiality of this issue',
- );
- });
+ it('calls flash with the correct message', () => {
+ expect(flash).toHaveBeenCalledWith(
+ 'Something went wrong trying to change the confidentiality of this issue',
+ );
});
});
});
diff --git a/spec/frontend/sidebar/confidential/edit_form_spec.js b/spec/frontend/sidebar/confidential/edit_form_spec.js
index a22bbe5ae0d..56f163eecd1 100644
--- a/spec/frontend/sidebar/confidential/edit_form_spec.js
+++ b/spec/frontend/sidebar/confidential/edit_form_spec.js
@@ -12,6 +12,7 @@ describe('Edit Form Dropdown', () => {
...props,
isLoading: false,
fullPath: '',
+ issuableType: 'issue',
},
});
};
@@ -22,26 +23,26 @@ describe('Edit Form Dropdown', () => {
});
describe('when not confidential', () => {
- it('renders "You are going to turn off the confidentiality." in the ', () => {
+ it('renders "You are going to turn on the confidentiality." in the ', () => {
createComponent({
- isConfidential: false,
+ confidential: false,
toggleForm,
updateConfidentialAttribute,
});
- expect(wrapper.find('p').text()).toContain('You are going to turn on the confidentiality.');
+ expect(wrapper.element).toMatchSnapshot();
});
});
describe('when confidential', () => {
it('renders on or off text based on confidentiality', () => {
createComponent({
- isConfidential: true,
+ confidential: true,
toggleForm,
updateConfidentialAttribute,
});
- expect(wrapper.find('p').text()).toContain('You are going to turn off the confidentiality.');
+ expect(wrapper.element).toMatchSnapshot();
});
});
});
diff --git a/spec/frontend/sidebar/confidential_issue_sidebar_spec.js b/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
index 06cf1e6166c..bc2df9305d0 100644
--- a/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
+++ b/spec/frontend/sidebar/confidential_issue_sidebar_spec.js
@@ -1,13 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import ConfidentialIssueSidebar from '~/sidebar/components/confidential/confidential_issue_sidebar.vue';
import EditForm from '~/sidebar/components/confidential/edit_form.vue';
-import SidebarService from '~/sidebar/services/sidebar_service';
-import createFlash from '~/flash';
-import RecaptchaModal from '~/vue_shared/components/recaptcha_modal.vue';
import createStore from '~/notes/stores';
-import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
-import eventHub from '~/sidebar/event_hub';
+import * as types from '~/notes/stores/mutation_types';
jest.mock('~/flash');
jest.mock('~/sidebar/services/sidebar_service');
@@ -20,32 +17,14 @@ describe('Confidential Issue Sidebar Block', () => {
.fn()
.mockResolvedValue({ data: { issueSetConfidential: { issue: { confidential: true } } } });
- const findRecaptchaModal = () => wrapper.find(RecaptchaModal);
-
- const triggerUpdateConfidentialAttribute = () => {
- wrapper.setData({ edit: true });
- return (
- // wait for edit form to become visible
- wrapper.vm
- .$nextTick()
- .then(() => {
- eventHub.$emit('updateConfidentialAttribute');
- })
- // wait for reCAPTCHA modal to render
- .then(() => wrapper.vm.$nextTick())
- );
- };
-
const createComponent = ({ propsData, data = {} }) => {
const store = createStore();
- const service = new SidebarService();
wrapper = shallowMount(ConfidentialIssueSidebar, {
store,
data() {
return data;
},
propsData: {
- service,
iid: '',
fullPath: '',
...propsData,
@@ -133,61 +112,48 @@ describe('Confidential Issue Sidebar Block', () => {
property: 'confidentiality',
});
});
-
- describe('for successful update', () => {
- beforeEach(() => {
- SidebarService.prototype.update.mockResolvedValue({ data: 'irrelevant' });
+ });
+ describe('computed confidential', () => {
+ beforeEach(() => {
+ createComponent({
+ propsData: {
+ isEditable: true,
+ },
});
+ });
- it('reloads the page', () =>
- triggerUpdateConfidentialAttribute().then(() => {
- expect(window.location.reload).toHaveBeenCalled();
- }));
+ it('returns false when noteableData is not present', () => {
+ wrapper.vm.$store.commit(types.SET_NOTEABLE_DATA, null);
- it('does not show an error message', () =>
- triggerUpdateConfidentialAttribute().then(() => {
- expect(createFlash).not.toHaveBeenCalled();
- }));
+ expect(wrapper.vm.confidential).toBe(false);
});
- describe('for update error', () => {
- beforeEach(() => {
- SidebarService.prototype.update.mockRejectedValue(new Error('updating failed!'));
- });
-
- it('does not reload the page', () =>
- triggerUpdateConfidentialAttribute().then(() => {
- expect(window.location.reload).not.toHaveBeenCalled();
- }));
+ it('returns true when noteableData has confidential attr as true', () => {
+ wrapper.vm.$store.commit(types.SET_NOTEABLE_DATA, {});
+ wrapper.vm.$store.commit(types.SET_ISSUE_CONFIDENTIAL, true);
- it('shows an error message', () =>
- triggerUpdateConfidentialAttribute().then(() => {
- expect(createFlash).toHaveBeenCalled();
- }));
+ expect(wrapper.vm.confidential).toBe(true);
});
- describe('for spam error', () => {
- beforeEach(() => {
- SidebarService.prototype.update.mockRejectedValue({ name: 'SpamError' });
- });
+ it('returns false when noteableData has confidential attr as false', () => {
+ wrapper.vm.$store.commit(types.SET_NOTEABLE_DATA, {});
+ wrapper.vm.$store.commit(types.SET_ISSUE_CONFIDENTIAL, false);
+
+ expect(wrapper.vm.confidential).toBe(false);
+ });
- it('does not reload the page', () =>
- triggerUpdateConfidentialAttribute().then(() => {
- expect(window.location.reload).not.toHaveBeenCalled();
- }));
+ it('returns true when confidential attr is true', () => {
+ wrapper.vm.$store.commit(types.SET_NOTEABLE_DATA, {});
+ wrapper.vm.$store.commit(types.SET_ISSUE_CONFIDENTIAL, true);
- it('does not show an error message', () =>
- triggerUpdateConfidentialAttribute().then(() => {
- expect(createFlash).not.toHaveBeenCalled();
- }));
+ expect(wrapper.vm.confidential).toBe(true);
+ });
- it('shows a reCAPTCHA modal', () => {
- expect(findRecaptchaModal().exists()).toBe(false);
+ it('returns false when confidential attr is false', () => {
+ wrapper.vm.$store.commit(types.SET_NOTEABLE_DATA, {});
+ wrapper.vm.$store.commit(types.SET_ISSUE_CONFIDENTIAL, false);
- return triggerUpdateConfidentialAttribute().then(() => {
- expect(findRecaptchaModal().exists()).toBe(true);
- });
- });
+ expect(wrapper.vm.confidential).toBe(false);
});
});
});
diff --git a/spec/frontend/sidebar/lock/__snapshots__/edit_form_spec.js.snap b/spec/frontend/sidebar/lock/__snapshots__/edit_form_spec.js.snap
new file mode 100644
index 00000000000..18d4df297df
--- /dev/null
+++ b/spec/frontend/sidebar/lock/__snapshots__/edit_form_spec.js.snap
@@ -0,0 +1,79 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Edit Form Dropdown In issue page when locked the appropriate warning text is rendered 1`] = `
+<div
+ class="dropdown-menu sidebar-item-warning-message"
+ data-testid="warning-text"
+>
+ <p
+ class="text"
+ >
+ <gl-sprintf-stub
+ message="Unlock this %{issuableDisplayName}? %{strongStart}Everyone%{strongEnd} will be able to comment."
+ />
+ </p>
+
+ <edit-form-buttons-stub
+ islocked="true"
+ issuabledisplayname="issue"
+ />
+</div>
+`;
+
+exports[`Edit Form Dropdown In issue page when unlocked the appropriate warning text is rendered 1`] = `
+<div
+ class="dropdown-menu sidebar-item-warning-message"
+ data-testid="warning-text"
+>
+ <p
+ class="text"
+ >
+ <gl-sprintf-stub
+ message="Lock this %{issuableDisplayName}? Only %{strongStart}project members%{strongEnd} will be able to comment."
+ />
+ </p>
+
+ <edit-form-buttons-stub
+ issuabledisplayname="issue"
+ />
+</div>
+`;
+
+exports[`Edit Form Dropdown In merge request page when locked the appropriate warning text is rendered 1`] = `
+<div
+ class="dropdown-menu sidebar-item-warning-message"
+ data-testid="warning-text"
+>
+ <p
+ class="text"
+ >
+ <gl-sprintf-stub
+ message="Unlock this %{issuableDisplayName}? %{strongStart}Everyone%{strongEnd} will be able to comment."
+ />
+ </p>
+
+ <edit-form-buttons-stub
+ islocked="true"
+ issuabledisplayname="merge request"
+ />
+</div>
+`;
+
+exports[`Edit Form Dropdown In merge request page when unlocked the appropriate warning text is rendered 1`] = `
+<div
+ class="dropdown-menu sidebar-item-warning-message"
+ data-testid="warning-text"
+>
+ <p
+ class="text"
+ >
+ <gl-sprintf-stub
+ message="Lock this %{issuableDisplayName}? Only %{strongStart}project members%{strongEnd} will be able to comment."
+ />
+ </p>
+
+ <edit-form-buttons-stub
+ issuabledisplayname="merge request"
+ />
+</div>
+`;
diff --git a/spec/frontend/sidebar/lock/constants.js b/spec/frontend/sidebar/lock/constants.js
new file mode 100644
index 00000000000..b9f08e9286d
--- /dev/null
+++ b/spec/frontend/sidebar/lock/constants.js
@@ -0,0 +1,2 @@
+export const ISSUABLE_TYPE_ISSUE = 'issue';
+export const ISSUABLE_TYPE_MR = 'merge request';
diff --git a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
index 66f9237ce97..de1da3456f8 100644
--- a/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_buttons_spec.js
@@ -1,31 +1,178 @@
import { shallowMount } from '@vue/test-utils';
+import { GlLoadingIcon } from '@gitlab/ui';
import EditFormButtons from '~/sidebar/components/lock/edit_form_buttons.vue';
+import eventHub from '~/sidebar/event_hub';
+import { deprecatedCreateFlash as flash } from '~/flash';
+import createStore from '~/notes/stores';
+import { createStore as createMrStore } from '~/mr_notes/stores';
+import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants';
+
+jest.mock('~/sidebar/event_hub', () => ({ $emit: jest.fn() }));
+jest.mock('~/flash');
describe('EditFormButtons', () => {
let wrapper;
+ let store;
+ let issuableType;
+ let issuableDisplayName;
+
+ const setIssuableType = pageType => {
+ issuableType = pageType;
+ issuableDisplayName = issuableType.replace(/_/g, ' ');
+ };
+
+ const findLockToggle = () => wrapper.find('[data-testid="lock-toggle"]');
+ const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const mountComponent = propsData => shallowMount(EditFormButtons, { propsData });
+ const createComponent = ({ props = {}, data = {}, resolved = true }) => {
+ store = issuableType === ISSUABLE_TYPE_ISSUE ? createStore() : createMrStore();
+
+ if (resolved) {
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+ } else {
+ jest.spyOn(store, 'dispatch').mockRejectedValue();
+ }
+
+ wrapper = shallowMount(EditFormButtons, {
+ store,
+ provide: {
+ fullPath: '',
+ },
+ propsData: {
+ isLocked: false,
+ issuableDisplayName,
+ ...props,
+ },
+ data() {
+ return {
+ isLoading: false,
+ ...data,
+ };
+ },
+ });
+ };
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- it('displays "Unlock" when locked', () => {
- wrapper = mountComponent({
- isLocked: true,
- updateLockedAttribute: () => {},
+ describe.each`
+ pageType
+ ${ISSUABLE_TYPE_ISSUE} | ${ISSUABLE_TYPE_MR}
+ `('In $pageType page', ({ pageType }) => {
+ beforeEach(() => {
+ setIssuableType(pageType);
});
- expect(wrapper.text()).toContain('Unlock');
- });
+ describe('when isLoading', () => {
+ beforeEach(() => {
+ createComponent({ data: { isLoading: true } });
+ });
+
+ it('renders "Applying" in the toggle button', () => {
+ expect(findLockToggle().text()).toBe('Applying');
+ });
+
+ it('disables the toggle button', () => {
+ expect(findLockToggle().attributes('disabled')).toBe('disabled');
+ });
- it('displays "Lock" when unlocked', () => {
- wrapper = mountComponent({
- isLocked: false,
- updateLockedAttribute: () => {},
+ it('displays the GlLoadingIcon', () => {
+ expect(findGlLoadingIcon().exists()).toBe(true);
+ });
});
- expect(wrapper.text()).toContain('Lock');
+ describe.each`
+ isLocked | toggleText | statusText
+ ${false} | ${'Lock'} | ${'unlocked'}
+ ${true} | ${'Unlock'} | ${'locked'}
+ `('when $statusText', ({ isLocked, toggleText }) => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ isLocked,
+ },
+ });
+ });
+
+ it(`toggle button displays "${toggleText}"`, () => {
+ expect(findLockToggle().text()).toContain(toggleText);
+ });
+
+ describe('when toggled', () => {
+ describe(`when resolved`, () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ isLocked,
+ },
+ resolved: true,
+ });
+ findLockToggle().trigger('click');
+ });
+
+ it('dispatches the correct action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('updateLockedAttribute', {
+ locked: !isLocked,
+ fullPath: '',
+ });
+ });
+
+ it('resets loading', async () => {
+ await wrapper.vm.$nextTick().then(() => {
+ expect(findGlLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ it('emits close form', () => {
+ return wrapper.vm.$nextTick().then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('closeLockForm');
+ });
+ });
+
+ it('does not flash an error message', () => {
+ expect(flash).not.toHaveBeenCalled();
+ });
+ });
+
+ describe(`when not resolved`, () => {
+ beforeEach(() => {
+ createComponent({
+ props: {
+ isLocked,
+ },
+ resolved: false,
+ });
+ findLockToggle().trigger('click');
+ });
+
+ it('dispatches the correct action', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('updateLockedAttribute', {
+ locked: !isLocked,
+ fullPath: '',
+ });
+ });
+
+ it('resets loading', async () => {
+ await wrapper.vm.$nextTick().then(() => {
+ expect(findGlLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ it('emits close form', () => {
+ return wrapper.vm.$nextTick().then(() => {
+ expect(eventHub.$emit).toHaveBeenCalledWith('closeLockForm');
+ });
+ });
+
+ it('calls flash with the correct message', () => {
+ expect(flash).toHaveBeenCalledWith(
+ `Something went wrong trying to change the locked state of this ${issuableDisplayName}`,
+ );
+ });
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/sidebar/lock/edit_form_spec.js b/spec/frontend/sidebar/lock/edit_form_spec.js
index ec10a999a40..b1c3bfe3ef5 100644
--- a/spec/frontend/sidebar/lock/edit_form_spec.js
+++ b/spec/frontend/sidebar/lock/edit_form_spec.js
@@ -1,37 +1,54 @@
-import Vue from 'vue';
-import editForm from '~/sidebar/components/lock/edit_form.vue';
+import { shallowMount } from '@vue/test-utils';
+import EditForm from '~/sidebar/components/lock/edit_form.vue';
+import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants';
-describe('EditForm', () => {
- let vm1;
- let vm2;
+describe('Edit Form Dropdown', () => {
+ let wrapper;
+ let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR
+ let issuableDisplayName;
- beforeEach(() => {
- const Component = Vue.extend(editForm);
- const toggleForm = () => {};
- const updateLockedAttribute = () => {};
+ const setIssuableType = pageType => {
+ issuableType = pageType;
+ issuableDisplayName = issuableType.replace(/_/g, ' ');
+ };
- vm1 = new Component({
- propsData: {
- isLocked: true,
- toggleForm,
- updateLockedAttribute,
- issuableType: 'issue',
- },
- }).$mount();
+ const findWarningText = () => wrapper.find('[data-testid="warning-text"]');
- vm2 = new Component({
+ const createComponent = ({ props }) => {
+ wrapper = shallowMount(EditForm, {
propsData: {
isLocked: false,
- toggleForm,
- updateLockedAttribute,
- issuableType: 'merge_request',
+ issuableDisplayName,
+ ...props,
},
- }).$mount();
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
});
- it('renders on the appropriate warning text', () => {
- expect(vm1.$el.innerHTML.includes('Unlock this issue?')).toBe(true);
+ describe.each`
+ pageType
+ ${ISSUABLE_TYPE_ISSUE} | ${ISSUABLE_TYPE_MR}
+ `('In $pageType page', ({ pageType }) => {
+ beforeEach(() => {
+ setIssuableType(pageType);
+ });
+
+ describe.each`
+ isLocked | lockStatusText
+ ${false} | ${'unlocked'}
+ ${true} | ${'locked'}
+ `('when $lockStatusText', ({ isLocked }) => {
+ beforeEach(() => {
+ createComponent({ props: { isLocked } });
+ });
- expect(vm2.$el.innerHTML.includes('Lock this merge request?')).toBe(true);
+ it(`the appropriate warning text is rendered`, () => {
+ expect(findWarningText().element).toMatchSnapshot();
+ });
+ });
});
});
diff --git a/spec/frontend/sidebar/lock/issuable_lock_form_spec.js b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
new file mode 100644
index 00000000000..ab1423a9bbb
--- /dev/null
+++ b/spec/frontend/sidebar/lock/issuable_lock_form_spec.js
@@ -0,0 +1,133 @@
+import { shallowMount } from '@vue/test-utils';
+import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
+import IssuableLockForm from '~/sidebar/components/lock/issuable_lock_form.vue';
+import EditForm from '~/sidebar/components/lock/edit_form.vue';
+import createStore from '~/notes/stores';
+import { createStore as createMrStore } from '~/mr_notes/stores';
+import { ISSUABLE_TYPE_ISSUE, ISSUABLE_TYPE_MR } from './constants';
+
+describe('IssuableLockForm', () => {
+ let wrapper;
+ let store;
+ let issuableType; // Either ISSUABLE_TYPE_ISSUE or ISSUABLE_TYPE_MR
+
+ const setIssuableType = pageType => {
+ issuableType = pageType;
+ };
+
+ const findSidebarCollapseIcon = () => wrapper.find('[data-testid="sidebar-collapse-icon"]');
+ const findLockStatus = () => wrapper.find('[data-testid="lock-status"]');
+ const findEditLink = () => wrapper.find('[data-testid="edit-link"]');
+ const findEditForm = () => wrapper.find(EditForm);
+
+ const initStore = isLocked => {
+ if (issuableType === ISSUABLE_TYPE_ISSUE) {
+ store = createStore();
+ store.getters.getNoteableData.targetType = 'issue';
+ } else {
+ store = createMrStore();
+ }
+ store.getters.getNoteableData.discussion_locked = isLocked;
+ };
+
+ const createComponent = ({ props = {} }) => {
+ wrapper = shallowMount(IssuableLockForm, {
+ store,
+ propsData: {
+ isEditable: true,
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe.each`
+ pageType
+ ${ISSUABLE_TYPE_ISSUE} | ${ISSUABLE_TYPE_MR}
+ `('In $pageType page', ({ pageType }) => {
+ beforeEach(() => {
+ setIssuableType(pageType);
+ });
+
+ describe.each`
+ isLocked
+ ${false} | ${true}
+ `(`renders for isLocked = $isLocked`, ({ isLocked }) => {
+ beforeEach(() => {
+ initStore(isLocked);
+ createComponent({});
+ });
+
+ it('shows the lock status', () => {
+ expect(findLockStatus().text()).toBe(isLocked ? 'Locked' : 'Unlocked');
+ });
+
+ describe('edit form', () => {
+ let isEditable;
+ beforeEach(() => {
+ isEditable = false;
+ createComponent({ props: { isEditable } });
+ });
+
+ describe('when not editable', () => {
+ it('does not display the edit form when opened if not editable', () => {
+ expect(findEditForm().exists()).toBe(false);
+ findSidebarCollapseIcon().trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findEditForm().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('when editable', () => {
+ beforeEach(() => {
+ isEditable = true;
+ createComponent({ props: { isEditable } });
+ });
+
+ it('shows the editable status', () => {
+ expect(findEditLink().exists()).toBe(isEditable);
+ expect(findEditLink().text()).toBe('Edit');
+ });
+
+ describe("when 'Edit' is clicked", () => {
+ it('displays the edit form when editable', () => {
+ expect(findEditForm().exists()).toBe(false);
+ findEditLink().trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findEditForm().exists()).toBe(true);
+ });
+ });
+
+ it('tracks the event ', () => {
+ const spy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ triggerEvent(findEditLink().element);
+
+ expect(spy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
+ label: 'right_sidebar',
+ property: 'lock_issue',
+ });
+ });
+ });
+
+ describe('When sidebar is collapsed', () => {
+ it('displays the edit form when opened', () => {
+ expect(findEditForm().exists()).toBe(false);
+ findSidebarCollapseIcon().trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(findEditForm().exists()).toBe(true);
+ });
+ });
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/lock/lock_issue_sidebar_spec.js b/spec/frontend/sidebar/lock/lock_issue_sidebar_spec.js
deleted file mode 100644
index 00997326d87..00000000000
--- a/spec/frontend/sidebar/lock/lock_issue_sidebar_spec.js
+++ /dev/null
@@ -1,99 +0,0 @@
-import Vue from 'vue';
-import { mockTracking, triggerEvent } from 'helpers/tracking_helper';
-import lockIssueSidebar from '~/sidebar/components/lock/lock_issue_sidebar.vue';
-
-describe('LockIssueSidebar', () => {
- let vm1;
- let vm2;
-
- beforeEach(() => {
- const Component = Vue.extend(lockIssueSidebar);
-
- const mediator = {
- service: {
- update: Promise.resolve(true),
- },
-
- store: {
- isLockDialogOpen: false,
- },
- };
-
- vm1 = new Component({
- propsData: {
- isLocked: true,
- isEditable: true,
- mediator,
- issuableType: 'issue',
- },
- }).$mount();
-
- vm2 = new Component({
- propsData: {
- isLocked: false,
- isEditable: false,
- mediator,
- issuableType: 'merge_request',
- },
- }).$mount();
- });
-
- it('shows if locked and/or editable', () => {
- expect(vm1.$el.innerHTML.includes('Edit')).toBe(true);
-
- expect(vm1.$el.innerHTML.includes('Locked')).toBe(true);
-
- expect(vm2.$el.innerHTML.includes('Unlocked')).toBe(true);
- });
-
- it('displays the edit form when editable', done => {
- expect(vm1.isLockDialogOpen).toBe(false);
-
- vm1.$el.querySelector('.lock-edit').click();
-
- expect(vm1.isLockDialogOpen).toBe(true);
-
- vm1.$nextTick(() => {
- expect(vm1.$el.innerHTML.includes('Unlock this issue?')).toBe(true);
-
- done();
- });
- });
-
- it('tracks an event when "Edit" is clicked', () => {
- const spy = mockTracking('_category_', vm1.$el, jest.spyOn);
- triggerEvent('.lock-edit');
-
- expect(spy).toHaveBeenCalledWith('_category_', 'click_edit_button', {
- label: 'right_sidebar',
- property: 'lock_issue',
- });
- });
-
- it('displays the edit form when opened from collapsed state', done => {
- expect(vm1.isLockDialogOpen).toBe(false);
-
- vm1.$el.querySelector('.sidebar-collapsed-icon').click();
-
- expect(vm1.isLockDialogOpen).toBe(true);
-
- setImmediate(() => {
- expect(vm1.$el.innerHTML.includes('Unlock this issue?')).toBe(true);
-
- done();
- });
- });
-
- it('does not display the edit form when opened from collapsed state if not editable', done => {
- expect(vm2.isLockDialogOpen).toBe(false);
-
- vm2.$el.querySelector('.sidebar-collapsed-icon').click();
-
- Vue.nextTick()
- .then(() => {
- expect(vm2.isLockDialogOpen).toBe(false);
- })
- .then(done)
- .catch(done.fail);
- });
-});
diff --git a/spec/frontend/sidebar/todo_spec.js b/spec/frontend/sidebar/todo_spec.js
index 18b621cd12d..e56a78989eb 100644
--- a/spec/frontend/sidebar/todo_spec.js
+++ b/spec/frontend/sidebar/todo_spec.js
@@ -36,7 +36,7 @@ describe('SidebarTodo', () => {
it.each`
isTodo | iconClass | label | icon
- ${false} | ${''} | ${'Add a To Do'} | ${'todo-add'}
+ ${false} | ${''} | ${'Add a To-Do'} | ${'todo-add'}
${true} | ${'todo-undone'} | ${'Mark as done'} | ${'todo-done'}
`(
'renders proper button when `isTodo` prop is `$isTodo`',
diff --git a/spec/frontend/snippet/collapsible_input_spec.js b/spec/frontend/snippet/collapsible_input_spec.js
index acd15164c95..aa017964437 100644
--- a/spec/frontend/snippet/collapsible_input_spec.js
+++ b/spec/frontend/snippet/collapsible_input_spec.js
@@ -1,5 +1,5 @@
-import setupCollapsibleInputs from '~/snippet/collapsible_input';
import { setHTMLFixture } from 'helpers/fixtures';
+import setupCollapsibleInputs from '~/snippet/collapsible_input';
describe('~/snippet/collapsible_input', () => {
let formEl;
diff --git a/spec/frontend/snippet/snippet_bundle_spec.js b/spec/frontend/snippet/snippet_bundle_spec.js
index 38d05243c65..ad69a91fe89 100644
--- a/spec/frontend/snippet/snippet_bundle_spec.js
+++ b/spec/frontend/snippet/snippet_bundle_spec.js
@@ -1,6 +1,6 @@
+import { setHTMLFixture } from 'helpers/fixtures';
import Editor from '~/editor/editor_lite';
import initEditor from '~/snippet/snippet_bundle';
-import { setHTMLFixture } from 'helpers/fixtures';
jest.mock('~/editor/editor_lite', () => jest.fn());
diff --git a/spec/frontend/snippet/snippet_edit_spec.js b/spec/frontend/snippet/snippet_edit_spec.js
index cfe5062c86b..7c12c0cac03 100644
--- a/spec/frontend/snippet/snippet_edit_spec.js
+++ b/spec/frontend/snippet/snippet_edit_spec.js
@@ -1,9 +1,8 @@
import '~/snippet/snippet_edit';
+import { triggerDOMEvent } from 'jest/helpers/dom_events_helper';
import { SnippetEditInit } from '~/snippets';
import initSnippet from '~/snippet/snippet_bundle';
-import { triggerDOMEvent } from 'jest/helpers/dom_events_helper';
-
jest.mock('~/snippet/snippet_bundle');
jest.mock('~/snippets');
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index 959bc24eef6..1cf1ee74ddf 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -1,25 +1,21 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Snippet Blob Edit component rendering matches the snapshot 1`] = `
+exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
<div
- class="form-group file-editor"
+ class="file-holder snippet"
>
- <label>
- File
- </label>
+ <blob-header-edit-stub
+ candelete="true"
+ data-qa-selector="file_name_field"
+ id="blob_local_7_file_path"
+ value="foo/bar/test.md"
+ />
- <div
- class="file-holder snippet"
- >
- <blob-header-edit-stub
- data-qa-selector="file_name_field"
- value="lorem.txt"
- />
-
- <blob-content-edit-stub
- filename="lorem.txt"
- value="Lorem ipsum dolor sit amet, consectetur adipiscing elit."
- />
- </div>
+ <blob-content-edit-stub
+ fileglobalid="blob_local_7"
+ filename="foo/bar/test.md"
+ value="Lorem ipsum dolar sit amet,
+consectetur adipiscing elit."
+ />
</div>
`;
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index 297ad16b681..6020d595e3f 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -60,7 +60,7 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
<a
aria-label="Leave zen mode"
- class="zen-control zen-control-leave js-zen-leave gl-text-gray-700"
+ class="zen-control zen-control-leave js-zen-leave gl-text-gray-500"
href="#"
>
<icon-stub
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index d2265dfd506..980855a0615 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -1,134 +1,157 @@
-import { shallowMount } from '@vue/test-utils';
-import Flash from '~/flash';
-
+import { ApolloMutation } from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
-import { redirectTo } from '~/lib/utils/url_utility';
-
+import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+import { deprecatedCreateFlash as Flash } from '~/flash';
+import * as urlUtils from '~/lib/utils/url_utility';
import SnippetEditApp from '~/snippets/components/edit.vue';
import SnippetDescriptionEdit from '~/snippets/components/snippet_description_edit.vue';
import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
-import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
+import SnippetBlobActionsEdit from '~/snippets/components/snippet_blob_actions_edit.vue';
import TitleField from '~/vue_shared/components/form/title.vue';
import FormFooterActions from '~/vue_shared/components/form/form_footer_actions.vue';
-import { SNIPPET_CREATE_MUTATION_ERROR, SNIPPET_UPDATE_MUTATION_ERROR } from '~/snippets/constants';
-
+import { SNIPPET_VISIBILITY_PRIVATE } from '~/snippets/constants';
import UpdateSnippetMutation from '~/snippets/mutations/updateSnippet.mutation.graphql';
import CreateSnippetMutation from '~/snippets/mutations/createSnippet.mutation.graphql';
-
-import waitForPromises from 'helpers/wait_for_promises';
-import { ApolloMutation } from 'vue-apollo';
-
-jest.mock('~/lib/utils/url_utility', () => ({
- redirectTo: jest.fn().mockName('redirectTo'),
-}));
+import { testEntries } from '../test_utils';
jest.mock('~/flash');
-let flashSpy;
-
-const rawProjectPathMock = '/project/path';
-const newlyEditedSnippetUrl = 'http://foo.bar';
-const apiError = { message: 'Ufff' };
-const mutationError = 'Bummer';
-
-const attachedFilePath1 = 'foo/bar';
-const attachedFilePath2 = 'alpha/beta';
-
-const actionWithContent = {
- content: 'Foo Bar',
-};
-const actionWithoutContent = {
- content: '',
-};
+const TEST_UPLOADED_FILES = ['foo/bar.txt', 'alpha/beta.js'];
+const TEST_API_ERROR = 'Ufff';
+const TEST_MUTATION_ERROR = 'Bummer';
-const defaultProps = {
- snippetGid: 'gid://gitlab/PersonalSnippet/42',
- markdownPreviewPath: 'http://preview.foo.bar',
- markdownDocsPath: 'http://docs.foo.bar',
-};
-const defaultData = {
- blobsActions: {
- ...actionWithContent,
- action: '',
+const TEST_ACTIONS = {
+ NO_CONTENT: {
+ ...testEntries.created.diff,
+ content: '',
+ },
+ NO_PATH: {
+ ...testEntries.created.diff,
+ filePath: '',
+ },
+ VALID: {
+ ...testEntries.created.diff,
},
};
+const TEST_WEB_URL = '/snippets/7';
+
+const createTestSnippet = () => ({
+ webUrl: TEST_WEB_URL,
+ id: 7,
+ title: 'Snippet Title',
+ description: 'Lorem ipsum snippet desc',
+ visibilityLevel: SNIPPET_VISIBILITY_PRIVATE,
+});
+
describe('Snippet Edit app', () => {
let wrapper;
- const resolveMutate = jest.fn().mockResolvedValue({
- data: {
- updateSnippet: {
- errors: [],
- snippet: {
- webUrl: newlyEditedSnippetUrl,
+ const mutationTypes = {
+ RESOLVE: jest.fn().mockResolvedValue({
+ data: {
+ updateSnippet: {
+ errors: [],
+ snippet: createTestSnippet(),
},
},
- },
- });
-
- const resolveMutateWithErrors = jest.fn().mockResolvedValue({
- data: {
- updateSnippet: {
- errors: [mutationError],
- snippet: {
- webUrl: newlyEditedSnippetUrl,
+ }),
+ RESOLVE_WITH_ERRORS: jest.fn().mockResolvedValue({
+ data: {
+ updateSnippet: {
+ errors: [TEST_MUTATION_ERROR],
+ snippet: createTestSnippet(),
+ },
+ createSnippet: {
+ errors: [TEST_MUTATION_ERROR],
+ snippet: null,
},
},
- createSnippet: {
- errors: [mutationError],
- snippet: null,
- },
- },
- });
-
- const rejectMutation = jest.fn().mockRejectedValue(apiError);
-
- const mutationTypes = {
- RESOLVE: resolveMutate,
- RESOLVE_WITH_ERRORS: resolveMutateWithErrors,
- REJECT: rejectMutation,
+ }),
+ REJECT: jest.fn().mockRejectedValue(TEST_API_ERROR),
};
function createComponent({
- props = defaultProps,
- data = {},
+ props = {},
loading = false,
mutationRes = mutationTypes.RESOLVE,
} = {}) {
- const $apollo = {
- queries: {
- snippet: {
- loading,
- },
- },
- mutate: mutationRes,
- };
+ if (wrapper) {
+ throw new Error('wrapper already exists');
+ }
wrapper = shallowMount(SnippetEditApp, {
- mocks: { $apollo },
+ mocks: {
+ $apollo: {
+ queries: {
+ snippet: { loading },
+ },
+ mutate: mutationRes,
+ },
+ },
stubs: {
- FormFooterActions,
ApolloMutation,
+ FormFooterActions,
},
propsData: {
+ snippetGid: 'gid://gitlab/PersonalSnippet/42',
+ markdownPreviewPath: 'http://preview.foo.bar',
+ markdownDocsPath: 'http://docs.foo.bar',
...props,
},
- data() {
- return data;
- },
});
-
- flashSpy = jest.spyOn(wrapper.vm, 'flashAPIFailure');
}
+ beforeEach(() => {
+ jest.spyOn(urlUtils, 'redirectTo').mockImplementation();
+ });
+
afterEach(() => {
wrapper.destroy();
+ wrapper = null;
});
+ const findBlobActions = () => wrapper.find(SnippetBlobActionsEdit);
const findSubmitButton = () => wrapper.find('[data-testid="snippet-submit-btn"]');
- const findCancellButton = () => wrapper.find('[data-testid="snippet-cancel-btn"]');
+ const findCancelButton = () => wrapper.find('[data-testid="snippet-cancel-btn"]');
+ const hasDisabledSubmit = () => Boolean(findSubmitButton().attributes('disabled'));
+
const clickSubmitBtn = () => wrapper.find('[data-testid="snippet-edit-form"]').trigger('submit');
+ const triggerBlobActions = actions => findBlobActions().vm.$emit('actions', actions);
+ const setUploadFilesHtml = paths => {
+ wrapper.vm.$el.innerHTML = paths.map(path => `<input name="files[]" value="${path}">`).join('');
+ };
+ const getApiData = ({
+ id,
+ title = '',
+ description = '',
+ visibilityLevel = SNIPPET_VISIBILITY_PRIVATE,
+ } = {}) => ({
+ id,
+ title,
+ description,
+ visibilityLevel,
+ blobActions: [],
+ });
+
+ // Ideally we wouldn't call this method directly, but we don't have a way to trigger
+ // apollo responses yet.
+ const loadSnippet = (...edges) => {
+ if (edges.length) {
+ wrapper.setData({
+ snippet: edges[0],
+ });
+ }
+
+ wrapper.vm.onSnippetFetch({
+ data: {
+ snippets: {
+ edges,
+ },
+ },
+ });
+ };
describe('rendering', () => {
it('renders loader while the query is in flight', () => {
@@ -136,295 +159,163 @@ describe('Snippet Edit app', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
- it('renders all required components', () => {
- createComponent();
-
- expect(wrapper.contains(TitleField)).toBe(true);
- expect(wrapper.contains(SnippetDescriptionEdit)).toBe(true);
- expect(wrapper.contains(SnippetBlobEdit)).toBe(true);
- expect(wrapper.contains(SnippetVisibilityEdit)).toBe(true);
- expect(wrapper.contains(FormFooterActions)).toBe(true);
- });
-
- it('does not fail if there is no snippet yet (new snippet creation)', () => {
- const snippetGid = '';
- createComponent({
- props: {
- ...defaultProps,
- snippetGid,
- },
- });
-
- expect(wrapper.props('snippetGid')).toBe(snippetGid);
- });
+ it.each([[{}], [{ snippetGid: '' }]])(
+ 'should render all required components with %s',
+ props => {
+ createComponent(props);
- it.each`
- title | blobsActions | expectation
- ${''} | ${{}} | ${true}
- ${''} | ${{ actionWithContent }} | ${true}
- ${''} | ${{ actionWithoutContent }} | ${true}
- ${'foo'} | ${{}} | ${true}
- ${'foo'} | ${{ actionWithoutContent }} | ${true}
- ${'foo'} | ${{ actionWithoutContent, actionWithContent }} | ${true}
- ${'foo'} | ${{ actionWithContent }} | ${false}
- `(
- 'disables submit button unless both title and content for all blobs are present',
- ({ title, blobsActions, expectation }) => {
- createComponent({
- data: {
- snippet: { title },
- blobsActions,
- },
- });
- const isBtnDisabled = Boolean(findSubmitButton().attributes('disabled'));
- expect(isBtnDisabled).toBe(expectation);
+ expect(wrapper.contains(TitleField)).toBe(true);
+ expect(wrapper.contains(SnippetDescriptionEdit)).toBe(true);
+ expect(wrapper.contains(SnippetVisibilityEdit)).toBe(true);
+ expect(wrapper.contains(FormFooterActions)).toBe(true);
+ expect(findBlobActions().exists()).toBe(true);
},
);
it.each`
- isNew | status | expectation
- ${true} | ${`new`} | ${`/snippets`}
- ${false} | ${`existing`} | ${newlyEditedSnippetUrl}
- `('sets correct href for the cancel button on a $status snippet', ({ isNew, expectation }) => {
- createComponent({
- data: {
- snippet: { webUrl: newlyEditedSnippetUrl },
- newSnippet: isNew,
- },
- });
+ title | actions | shouldDisable
+ ${''} | ${[]} | ${true}
+ ${''} | ${[TEST_ACTIONS.VALID]} | ${true}
+ ${'foo'} | ${[]} | ${false}
+ ${'foo'} | ${[TEST_ACTIONS.VALID]} | ${false}
+ ${'foo'} | ${[TEST_ACTIONS.VALID, TEST_ACTIONS.NO_CONTENT]} | ${true}
+ ${'foo'} | ${[TEST_ACTIONS.VALID, TEST_ACTIONS.NO_PATH]} | ${true}
+ `(
+ 'should handle submit disable (title=$title, actions=$actions, shouldDisable=$shouldDisable)',
+ async ({ title, actions, shouldDisable }) => {
+ createComponent();
- expect(findCancellButton().attributes('href')).toBe(expectation);
- });
- });
+ loadSnippet({ title });
+ triggerBlobActions(actions);
- describe('functionality', () => {
- describe('form submission handling', () => {
- it('does not submit unchanged blobs', () => {
- const foo = {
- action: '',
- };
- const bar = {
- action: 'update',
- };
- createComponent({
- data: {
- blobsActions: {
- foo,
- bar,
- },
- },
- });
- clickSubmitBtn();
+ await wrapper.vm.$nextTick();
- return waitForPromises().then(() => {
- expect(resolveMutate).toHaveBeenCalledWith(
- expect.objectContaining({ variables: { input: { files: [bar] } } }),
- );
- });
- });
+ expect(hasDisabledSubmit()).toBe(shouldDisable);
+ },
+ );
- it.each`
- newSnippet | projectPath | mutation | mutationName
- ${true} | ${rawProjectPathMock} | ${CreateSnippetMutation} | ${'CreateSnippetMutation with projectPath'}
- ${true} | ${''} | ${CreateSnippetMutation} | ${'CreateSnippetMutation without projectPath'}
- ${false} | ${rawProjectPathMock} | ${UpdateSnippetMutation} | ${'UpdateSnippetMutation with projectPath'}
- ${false} | ${''} | ${UpdateSnippetMutation} | ${'UpdateSnippetMutation without projectPath'}
- `('should submit $mutationName correctly', ({ newSnippet, projectPath, mutation }) => {
+ it.each`
+ projectPath | snippetArg | expectation
+ ${''} | ${[]} | ${'/-/snippets'}
+ ${'project/path'} | ${[]} | ${'/project/path/-/snippets'}
+ ${''} | ${[createTestSnippet()]} | ${TEST_WEB_URL}
+ ${'project/path'} | ${[createTestSnippet()]} | ${TEST_WEB_URL}
+ `(
+ 'should set cancel href when (projectPath=$projectPath, snippet=$snippetArg)',
+ async ({ projectPath, snippetArg, expectation }) => {
createComponent({
- data: {
- newSnippet,
- ...defaultData,
- },
- props: {
- ...defaultProps,
- projectPath,
- },
+ props: { projectPath },
});
- const mutationPayload = {
- mutation,
- variables: {
- input: newSnippet ? expect.objectContaining({ projectPath }) : expect.any(Object),
- },
- };
-
- clickSubmitBtn();
-
- expect(resolveMutate).toHaveBeenCalledWith(mutationPayload);
- });
+ loadSnippet(...snippetArg);
- it('redirects to snippet view on successful mutation', () => {
- createComponent();
- clickSubmitBtn();
+ await wrapper.vm.$nextTick();
- return waitForPromises().then(() => {
- expect(redirectTo).toHaveBeenCalledWith(newlyEditedSnippetUrl);
- });
- });
+ expect(findCancelButton().attributes('href')).toBe(expectation);
+ },
+ );
+ });
+ describe('functionality', () => {
+ describe('form submission handling', () => {
it.each`
- newSnippet | projectPath | mutationName
- ${true} | ${rawProjectPathMock} | ${'CreateSnippetMutation with projectPath'}
- ${true} | ${''} | ${'CreateSnippetMutation without projectPath'}
- ${false} | ${rawProjectPathMock} | ${'UpdateSnippetMutation with projectPath'}
- ${false} | ${''} | ${'UpdateSnippetMutation without projectPath'}
+ snippetArg | projectPath | uploadedFiles | input | mutation
+ ${[]} | ${'project/path'} | ${[]} | ${{ ...getApiData(), projectPath: 'project/path', uploadedFiles: [] }} | ${CreateSnippetMutation}
+ ${[]} | ${''} | ${[]} | ${{ ...getApiData(), projectPath: '', uploadedFiles: [] }} | ${CreateSnippetMutation}
+ ${[]} | ${''} | ${TEST_UPLOADED_FILES} | ${{ ...getApiData(), projectPath: '', uploadedFiles: TEST_UPLOADED_FILES }} | ${CreateSnippetMutation}
+ ${[createTestSnippet()]} | ${'project/path'} | ${[]} | ${getApiData(createTestSnippet())} | ${UpdateSnippetMutation}
+ ${[createTestSnippet()]} | ${''} | ${[]} | ${getApiData(createTestSnippet())} | ${UpdateSnippetMutation}
`(
- 'does not redirect to snippet view if the seemingly successful' +
- ' $mutationName response contains errors',
- ({ newSnippet, projectPath }) => {
+ 'should submit mutation with (snippet=$snippetArg, projectPath=$projectPath, uploadedFiles=$uploadedFiles)',
+ async ({ snippetArg, projectPath, uploadedFiles, mutation, input }) => {
createComponent({
- data: {
- newSnippet,
- },
props: {
- ...defaultProps,
projectPath,
},
- mutationRes: mutationTypes.RESOLVE_WITH_ERRORS,
});
+ loadSnippet(...snippetArg);
+ setUploadFilesHtml(uploadedFiles);
+
+ await wrapper.vm.$nextTick();
clickSubmitBtn();
- return waitForPromises().then(() => {
- expect(redirectTo).not.toHaveBeenCalled();
- expect(flashSpy).toHaveBeenCalledWith(mutationError);
+ expect(mutationTypes.RESOLVE).toHaveBeenCalledWith({
+ mutation,
+ variables: {
+ input,
+ },
});
},
);
- it('flashes an error if mutation failed', () => {
- createComponent({
- mutationRes: mutationTypes.REJECT,
- });
+ it('should redirect to snippet view on successful mutation', async () => {
+ createComponent();
+ loadSnippet(createTestSnippet());
clickSubmitBtn();
- return waitForPromises().then(() => {
- expect(redirectTo).not.toHaveBeenCalled();
- expect(flashSpy).toHaveBeenCalledWith(apiError);
- });
+ await waitForPromises();
+
+ expect(urlUtils.redirectTo).toHaveBeenCalledWith(TEST_WEB_URL);
});
it.each`
- isNew | status | expectation
- ${true} | ${`new`} | ${SNIPPET_CREATE_MUTATION_ERROR.replace('%{err}', '')}
- ${false} | ${`existing`} | ${SNIPPET_UPDATE_MUTATION_ERROR.replace('%{err}', '')}
+ snippetArg | projectPath | mutationRes | expectMessage
+ ${[]} | ${'project/path'} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't create snippet: ${TEST_MUTATION_ERROR}`}
+ ${[]} | ${''} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't create snippet: ${TEST_MUTATION_ERROR}`}
+ ${[]} | ${''} | ${mutationTypes.REJECT} | ${`Can't create snippet: ${TEST_API_ERROR}`}
+ ${[createTestSnippet()]} | ${'project/path'} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't update snippet: ${TEST_MUTATION_ERROR}`}
+ ${[createTestSnippet()]} | ${''} | ${mutationTypes.RESOLVE_WITH_ERRORS} | ${`Can't update snippet: ${TEST_MUTATION_ERROR}`}
`(
- `renders the correct error message if mutation fails for $status snippet`,
- ({ isNew, expectation }) => {
+ 'should flash error with (snippet=$snippetArg, projectPath=$projectPath)',
+ async ({ snippetArg, projectPath, mutationRes, expectMessage }) => {
createComponent({
- data: {
- newSnippet: isNew,
+ props: {
+ projectPath,
},
- mutationRes: mutationTypes.REJECT,
+ mutationRes,
});
+ loadSnippet(...snippetArg);
clickSubmitBtn();
- return waitForPromises().then(() => {
- expect(Flash).toHaveBeenCalledWith(expect.stringContaining(expectation));
- });
+ await waitForPromises();
+
+ expect(urlUtils.redirectTo).not.toHaveBeenCalled();
+ expect(Flash).toHaveBeenCalledWith(expectMessage);
},
);
});
- describe('correctly includes attached files into the mutation', () => {
- const createMutationPayload = expectation => {
- return expect.objectContaining({
- variables: {
- input: expect.objectContaining({ uploadedFiles: expectation }),
- },
- });
- };
-
- const updateMutationPayload = () => {
- return expect.objectContaining({
- variables: {
- input: expect.not.objectContaining({ uploadedFiles: expect.anything() }),
- },
- });
- };
-
- it.each`
- paths | expectation
- ${[attachedFilePath1]} | ${[attachedFilePath1]}
- ${[attachedFilePath1, attachedFilePath2]} | ${[attachedFilePath1, attachedFilePath2]}
- ${[]} | ${[]}
- `(`correctly sends paths for $paths.length files`, ({ paths, expectation }) => {
- createComponent({
- data: {
- newSnippet: true,
- },
- });
-
- const fixtures = paths.map(path => {
- return path ? `<input name="files[]" value="${path}">` : undefined;
- });
- wrapper.vm.$el.innerHTML += fixtures.join('');
-
- clickSubmitBtn();
-
- expect(resolveMutate).toHaveBeenCalledWith(createMutationPayload(expectation));
- });
-
- it(`neither fails nor sends 'uploadedFiles' to update mutation`, () => {
- createComponent();
-
- clickSubmitBtn();
- expect(resolveMutate).toHaveBeenCalledWith(updateMutationPayload());
- });
- });
-
describe('on before unload', () => {
- let event;
- let returnValueSetter;
-
- const bootstrap = data => {
- createComponent({
- data,
- });
-
- event = new Event('beforeunload');
- returnValueSetter = jest.spyOn(event, 'returnValue', 'set');
- };
-
- it('does not prevent page navigation if there are no blobs', () => {
- bootstrap();
- window.dispatchEvent(event);
-
- expect(returnValueSetter).not.toHaveBeenCalled();
- });
-
- it('does not prevent page navigation if there are no changes to the blobs content', () => {
- bootstrap({
- blobsActions: {
- foo: {
- ...actionWithContent,
- action: '',
- },
- },
- });
- window.dispatchEvent(event);
+ it.each`
+ condition | expectPrevented | action
+ ${'there are no actions'} | ${false} | ${() => triggerBlobActions([])}
+ ${'there are actions'} | ${true} | ${() => triggerBlobActions([testEntries.updated.diff])}
+ ${'the snippet is being saved'} | ${false} | ${() => clickSubmitBtn()}
+ `(
+ 'handles before unload prevent when $condition (expectPrevented=$expectPrevented)',
+ ({ expectPrevented, action }) => {
+ createComponent();
+ loadSnippet();
- expect(returnValueSetter).not.toHaveBeenCalled();
- });
+ action();
- it('prevents page navigation if there are some changes in the snippet content', () => {
- bootstrap({
- blobsActions: {
- foo: {
- ...actionWithContent,
- action: 'update',
- },
- },
- });
+ const event = new Event('beforeunload');
+ const returnValueSetter = jest.spyOn(event, 'returnValue', 'set');
- window.dispatchEvent(event);
+ window.dispatchEvent(event);
- expect(returnValueSetter).toHaveBeenCalledWith(
- 'Are you sure you want to lose unsaved changes?',
- );
- });
+ if (expectPrevented) {
+ expect(returnValueSetter).toHaveBeenCalledWith(
+ 'Are you sure you want to lose unsaved changes?',
+ );
+ } else {
+ expect(returnValueSetter).not.toHaveBeenCalled();
+ }
+ },
+ );
});
});
});
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index b5446e70028..8cccbb83d54 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -1,19 +1,27 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
+import { shallowMount } from '@vue/test-utils';
import SnippetApp from '~/snippets/components/show.vue';
import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
import SnippetHeader from '~/snippets/components/snippet_header.vue';
import SnippetTitle from '~/snippets/components/snippet_title.vue';
import SnippetBlob from '~/snippets/components/snippet_blob_view.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
-import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
+import CloneDropdownButton from '~/vue_shared/components/clone_dropdown.vue';
-import { shallowMount } from '@vue/test-utils';
-import { SNIPPET_VISIBILITY_PUBLIC } from '~/snippets/constants';
+import {
+ SNIPPET_VISIBILITY_INTERNAL,
+ SNIPPET_VISIBILITY_PRIVATE,
+ SNIPPET_VISIBILITY_PUBLIC,
+} from '~/snippets/constants';
describe('Snippet view app', () => {
let wrapper;
const defaultProps = {
snippetGid: 'gid://gitlab/PersonalSnippet/42',
};
+ const webUrl = 'http://foo.bar';
+ const dummyHTTPUrl = webUrl;
+ const dummySSHUrl = 'ssh://foo.bar';
function createComponent({ props = defaultProps, data = {}, loading = false } = {}) {
const $apollo = {
@@ -72,4 +80,47 @@ describe('Snippet view app', () => {
expect(blobs.at(0).props('blob')).toEqual(Blob);
expect(blobs.at(1).props('blob')).toEqual(BinaryBlob);
});
+
+ describe('Embed dropdown rendering', () => {
+ it.each`
+ visibilityLevel | condition | isRendered
+ ${SNIPPET_VISIBILITY_INTERNAL} | ${'not render'} | ${false}
+ ${SNIPPET_VISIBILITY_PRIVATE} | ${'not render'} | ${false}
+ ${'foo'} | ${'not render'} | ${false}
+ ${SNIPPET_VISIBILITY_PUBLIC} | ${'render'} | ${true}
+ `('does $condition blob-embeddable by default', ({ visibilityLevel, isRendered }) => {
+ createComponent({
+ data: {
+ snippet: {
+ visibilityLevel,
+ webUrl,
+ },
+ },
+ });
+ expect(wrapper.contains(BlobEmbeddable)).toBe(isRendered);
+ });
+ });
+
+ describe('Clone button rendering', () => {
+ it.each`
+ httpUrlToRepo | sshUrlToRepo | shouldRender | isRendered
+ ${null} | ${null} | ${'Should not'} | ${false}
+ ${null} | ${dummySSHUrl} | ${'Should'} | ${true}
+ ${dummyHTTPUrl} | ${null} | ${'Should'} | ${true}
+ ${dummyHTTPUrl} | ${dummySSHUrl} | ${'Should'} | ${true}
+ `(
+ '$shouldRender render "Clone" button when `httpUrlToRepo` is $httpUrlToRepo and `sshUrlToRepo` is $sshUrlToRepo',
+ ({ httpUrlToRepo, sshUrlToRepo, isRendered }) => {
+ createComponent({
+ data: {
+ snippet: {
+ sshUrlToRepo,
+ httpUrlToRepo,
+ },
+ },
+ });
+ expect(wrapper.contains(CloneDropdownButton)).toBe(isRendered);
+ },
+ );
+ });
});
diff --git a/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
new file mode 100644
index 00000000000..8b2051008d7
--- /dev/null
+++ b/spec/frontend/snippets/components/snippet_blob_actions_edit_spec.js
@@ -0,0 +1,301 @@
+import { times } from 'lodash';
+import { shallowMount } from '@vue/test-utils';
+import SnippetBlobActionsEdit from '~/snippets/components/snippet_blob_actions_edit.vue';
+import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
+import {
+ SNIPPET_MAX_BLOBS,
+ SNIPPET_BLOB_ACTION_CREATE,
+ SNIPPET_BLOB_ACTION_MOVE,
+} from '~/snippets/constants';
+import { testEntries, createBlobFromTestEntry } from '../test_utils';
+
+const TEST_BLOBS = [
+ createBlobFromTestEntry(testEntries.updated),
+ createBlobFromTestEntry(testEntries.deleted),
+];
+
+const TEST_BLOBS_UNLOADED = TEST_BLOBS.map(blob => ({ ...blob, content: '', isLoaded: false }));
+
+describe('snippets/components/snippet_blob_actions_edit', () => {
+ let wrapper;
+
+ const createComponent = (props = {}, snippetMultipleFiles = true) => {
+ wrapper = shallowMount(SnippetBlobActionsEdit, {
+ propsData: {
+ initBlobs: TEST_BLOBS,
+ ...props,
+ },
+ provide: {
+ glFeatures: {
+ snippetMultipleFiles,
+ },
+ },
+ });
+ };
+
+ const findLabel = () => wrapper.find('label');
+ const findBlobEdits = () => wrapper.findAll(SnippetBlobEdit);
+ const findBlobsData = () =>
+ findBlobEdits().wrappers.map(x => ({
+ blob: x.props('blob'),
+ classes: x.classes(),
+ }));
+ const findFirstBlobEdit = () => findBlobEdits().at(0);
+ const findAddButton = () => wrapper.find('[data-testid="add_button"]');
+ const getLastActions = () => {
+ const events = wrapper.emitted().actions;
+
+ return events[events.length - 1]?.[0];
+ };
+ const buildBlobsDataExpectation = blobs =>
+ blobs.map((blob, index) => ({
+ blob: {
+ ...blob,
+ id: expect.stringMatching('blob_local_'),
+ },
+ classes: index > 0 ? ['gl-mt-3'] : [],
+ }));
+ const triggerBlobDelete = idx =>
+ findBlobEdits()
+ .at(idx)
+ .vm.$emit('delete');
+ const triggerBlobUpdate = (idx, props) =>
+ findBlobEdits()
+ .at(idx)
+ .vm.$emit('blob-updated', props);
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe.each`
+ featureFlag | label | showDelete | showAdd
+ ${true} | ${'Files'} | ${true} | ${true}
+ ${false} | ${'File'} | ${false} | ${false}
+ `('with feature flag = $featureFlag', ({ featureFlag, label, showDelete, showAdd }) => {
+ beforeEach(() => {
+ createComponent({}, featureFlag);
+ });
+
+ it('renders label', () => {
+ expect(findLabel().text()).toBe(label);
+ });
+
+ it(`renders delete button (show=${showDelete})`, () => {
+ expect(findFirstBlobEdit().props()).toMatchObject({
+ showDelete,
+ canDelete: true,
+ });
+ });
+
+ it(`renders add button (show=${showAdd})`, () => {
+ expect(findAddButton().exists()).toBe(showAdd);
+ });
+ });
+
+ describe('with default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('emits no actions', () => {
+ expect(getLastActions()).toEqual([]);
+ });
+
+ it('shows blobs', () => {
+ expect(findBlobsData()).toEqual(buildBlobsDataExpectation(TEST_BLOBS_UNLOADED));
+ });
+
+ it('shows add button', () => {
+ const button = findAddButton();
+
+ expect(button.text()).toBe(`Add another file ${TEST_BLOBS.length}/${SNIPPET_MAX_BLOBS}`);
+ expect(button.props('disabled')).toBe(false);
+ });
+
+ describe('when add is clicked', () => {
+ beforeEach(() => {
+ findAddButton().vm.$emit('click');
+ });
+
+ it('adds blob with empty content', () => {
+ expect(findBlobsData()).toEqual(
+ buildBlobsDataExpectation([
+ ...TEST_BLOBS_UNLOADED,
+ {
+ content: '',
+ isLoaded: true,
+ path: '',
+ },
+ ]),
+ );
+ });
+
+ it('emits action', () => {
+ expect(getLastActions()).toEqual([
+ expect.objectContaining({
+ action: SNIPPET_BLOB_ACTION_CREATE,
+ }),
+ ]);
+ });
+ });
+
+ describe('when blob is deleted', () => {
+ beforeEach(() => {
+ triggerBlobDelete(1);
+ });
+
+ it('removes blob', () => {
+ expect(findBlobsData()).toEqual(buildBlobsDataExpectation(TEST_BLOBS_UNLOADED.slice(0, 1)));
+ });
+
+ it('emits action', () => {
+ expect(getLastActions()).toEqual([
+ expect.objectContaining({
+ ...testEntries.deleted.diff,
+ content: '',
+ }),
+ ]);
+ });
+ });
+
+ describe('when blob changes path', () => {
+ beforeEach(() => {
+ triggerBlobUpdate(0, { path: 'new/path' });
+ });
+
+ it('renames blob', () => {
+ expect(findBlobsData()[0]).toMatchObject({
+ blob: {
+ path: 'new/path',
+ },
+ });
+ });
+
+ it('emits action', () => {
+ expect(getLastActions()).toMatchObject([
+ {
+ action: SNIPPET_BLOB_ACTION_MOVE,
+ filePath: 'new/path',
+ previousPath: testEntries.updated.diff.filePath,
+ },
+ ]);
+ });
+ });
+
+ describe('when blob emits new content', () => {
+ const { content } = testEntries.updated.diff;
+ const originalContent = `${content}\noriginal content\n`;
+
+ beforeEach(() => {
+ triggerBlobUpdate(0, { content: originalContent });
+ });
+
+ it('loads new content', () => {
+ expect(findBlobsData()[0]).toMatchObject({
+ blob: {
+ content: originalContent,
+ isLoaded: true,
+ },
+ });
+ });
+
+ it('does not emit an action', () => {
+ expect(getLastActions()).toEqual([]);
+ });
+
+ it('emits an action when content changes again', async () => {
+ triggerBlobUpdate(0, { content });
+
+ await wrapper.vm.$nextTick();
+
+ expect(getLastActions()).toEqual([testEntries.updated.diff]);
+ });
+ });
+ });
+
+ describe('with 1 blob', () => {
+ beforeEach(() => {
+ createComponent({ initBlobs: [createBlobFromTestEntry(testEntries.created)] });
+ });
+
+ it('disables delete button', () => {
+ expect(findBlobEdits()).toHaveLength(1);
+ expect(
+ findBlobEdits()
+ .at(0)
+ .props(),
+ ).toMatchObject({
+ showDelete: true,
+ canDelete: false,
+ });
+ });
+
+ describe(`when added ${SNIPPET_MAX_BLOBS} files`, () => {
+ let addButton;
+
+ beforeEach(() => {
+ addButton = findAddButton();
+
+ times(SNIPPET_MAX_BLOBS - 1, () => addButton.vm.$emit('click'));
+ });
+
+ it('should have blobs', () => {
+ expect(findBlobsData()).toHaveLength(SNIPPET_MAX_BLOBS);
+ });
+
+ it('should disable add button', () => {
+ expect(addButton.props('disabled')).toBe(true);
+ });
+ });
+ });
+
+ describe('with 0 init blob', () => {
+ beforeEach(() => {
+ createComponent({ initBlobs: [] });
+ });
+
+ it('shows 1 blob by default', () => {
+ expect(findBlobsData()).toEqual([
+ expect.objectContaining({
+ blob: {
+ id: expect.stringMatching('blob_local_'),
+ content: '',
+ path: '',
+ isLoaded: true,
+ },
+ }),
+ ]);
+ });
+
+ it('emits create action', () => {
+ expect(getLastActions()).toEqual([
+ {
+ action: SNIPPET_BLOB_ACTION_CREATE,
+ content: '',
+ filePath: '',
+ previousPath: '',
+ },
+ ]);
+ });
+ });
+
+ describe(`with ${SNIPPET_MAX_BLOBS} files`, () => {
+ beforeEach(() => {
+ const initBlobs = Array(SNIPPET_MAX_BLOBS)
+ .fill(1)
+ .map(() => createBlobFromTestEntry(testEntries.created));
+
+ createComponent({ initBlobs });
+ });
+
+ it('should have blobs', () => {
+ expect(findBlobsData()).toHaveLength(SNIPPET_MAX_BLOBS);
+ });
+
+ it('should disable add button', () => {
+ expect(findAddButton().props('disabled')).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/snippet_blob_edit_spec.js b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
index 009074b4558..188f9ae5cf1 100644
--- a/spec/frontend/snippets/components/snippet_blob_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_edit_spec.js
@@ -1,165 +1,168 @@
-import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
-import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
-import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import AxiosMockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { TEST_HOST } from 'helpers/test_constants';
+import SnippetBlobEdit from '~/snippets/components/snippet_blob_edit.vue';
+import BlobHeaderEdit from '~/blob/components/blob_edit_header.vue';
+import BlobContentEdit from '~/blob/components/blob_edit_content.vue';
import axios from '~/lib/utils/axios_utils';
import { joinPaths } from '~/lib/utils/url_utility';
-import waitForPromises from 'helpers/wait_for_promises';
-
-jest.mock('~/blob/utils', () => jest.fn());
-
-jest.mock('~/lib/utils/url_utility', () => ({
- getBaseURL: jest.fn().mockReturnValue('foo/'),
- joinPaths: jest
- .fn()
- .mockName('joinPaths')
- .mockReturnValue('contentApiURL'),
-}));
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
-let flashSpy;
+const TEST_ID = 'blob_local_7';
+const TEST_PATH = 'foo/bar/test.md';
+const TEST_RAW_PATH = '/gitlab/raw/path/to/blob/7';
+const TEST_FULL_PATH = joinPaths(TEST_HOST, TEST_RAW_PATH);
+const TEST_CONTENT = 'Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit.';
+
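+// TEST_BLOB models a blob whose raw content has not been fetched yet; TEST_BLOB_LOADED is the same blob with its content present.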
+const TEST_BLOB = {
+ id: TEST_ID,
+ rawPath: TEST_RAW_PATH,
+ path: TEST_PATH,
+ content: '',
+ isLoaded: false,
+};
+
+const TEST_BLOB_LOADED = {
+ ...TEST_BLOB,
+ content: TEST_CONTENT,
+ isLoaded: true,
+};
describe('Snippet Blob Edit component', () => {
let wrapper;
let axiosMock;
- const contentMock = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
- const pathMock = 'lorem.txt';
- const rawPathMock = 'foo/bar';
- const blob = {
- path: pathMock,
- content: contentMock,
- rawPath: rawPathMock,
- };
- const findComponent = component => wrapper.find(component);
- function createComponent(props = {}, data = { isContentLoading: false }) {
+ const createComponent = (props = {}) => {
wrapper = shallowMount(SnippetBlobEdit, {
propsData: {
+ blob: TEST_BLOB,
...props,
},
- data() {
- return {
- ...data,
- };
- },
});
- flashSpy = jest.spyOn(wrapper.vm, 'flashAPIFailure');
- }
+ };
+
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findHeader = () => wrapper.find(BlobHeaderEdit);
+ const findContent = () => wrapper.find(BlobContentEdit);
+ const getLastUpdatedArgs = () => {
+ const event = wrapper.emitted()['blob-updated'];
+
+ return event?.[event.length - 1][0];
+ };
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
- createComponent();
+ axiosMock.onGet(TEST_FULL_PATH).reply(200, TEST_CONTENT);
});
afterEach(() => {
- axiosMock.restore();
wrapper.destroy();
+ wrapper = null;
+ axiosMock.restore();
});
- describe('rendering', () => {
- it('matches the snapshot', () => {
- createComponent({ blob });
- expect(wrapper.element).toMatchSnapshot();
+ describe('with not loaded blob', () => {
+    beforeEach(() => {
+ createComponent();
});
- it('renders required components', () => {
- expect(findComponent(BlobHeaderEdit).exists()).toBe(true);
- expect(findComponent(BlobContentEdit).exists()).toBe(true);
+ it('shows blob header', () => {
+ expect(findHeader().props()).toMatchObject({
+ value: TEST_BLOB.path,
+ });
+ expect(findHeader().attributes('id')).toBe(`${TEST_ID}_file_path`);
});
- it('renders loader if existing blob is supplied but no content is fetched yet', () => {
- createComponent({ blob }, { isContentLoading: true });
- expect(wrapper.contains(GlLoadingIcon)).toBe(true);
- expect(findComponent(BlobContentEdit).exists()).toBe(false);
+ it('emits delete when deleted', () => {
+ expect(wrapper.emitted().delete).toBeUndefined();
+
+ findHeader().vm.$emit('delete');
+
+ expect(wrapper.emitted().delete).toHaveLength(1);
});
- it('does not render loader if when blob is not supplied', () => {
- createComponent();
- expect(wrapper.contains(GlLoadingIcon)).toBe(false);
- expect(findComponent(BlobContentEdit).exists()).toBe(true);
+ it('emits update when path changes', () => {
+ const newPath = 'new/path.md';
+
+ findHeader().vm.$emit('input', newPath);
+
+ expect(getLastUpdatedArgs()).toEqual({ path: newPath });
});
- });
- describe('functionality', () => {
- it('does not fail without blob', () => {
- const spy = jest.spyOn(global.console, 'error');
- createComponent({ blob: undefined });
+ it('emits update when content is loaded', async () => {
+ await waitForPromises();
- expect(spy).not.toHaveBeenCalled();
- expect(findComponent(BlobContentEdit).exists()).toBe(true);
+ expect(getLastUpdatedArgs()).toEqual({ content: TEST_CONTENT });
});
+ });
- it.each`
- emitter | prop
- ${BlobHeaderEdit} | ${'filePath'}
- ${BlobContentEdit} | ${'content'}
- `('emits "blob-updated" event when the $prop gets changed', ({ emitter, prop }) => {
- expect(wrapper.emitted('blob-updated')).toBeUndefined();
- const newValue = 'foo.bar';
- findComponent(emitter).vm.$emit('input', newValue);
-
- return nextTick().then(() => {
- expect(wrapper.emitted('blob-updated')[0]).toEqual([
- expect.objectContaining({
- [prop]: newValue,
- }),
- ]);
- });
+ describe('with error', () => {
+ beforeEach(() => {
+ axiosMock.reset();
+ axiosMock.onGet(TEST_FULL_PATH).replyOnce(500);
+ createComponent();
});
- describe('fetching blob content', () => {
- const bootstrapForExistingSnippet = resp => {
- createComponent({
- blob: {
- ...blob,
- content: '',
- },
- });
+ it('should call flash', async () => {
+ await waitForPromises();
- if (resp === 500) {
- axiosMock.onGet('contentApiURL').reply(500);
- } else {
- axiosMock.onGet('contentApiURL').reply(200, contentMock);
- }
- };
+ expect(createFlash).toHaveBeenCalledWith(
+ "Can't fetch content for the blob: Error: Request failed with status code 500",
+ );
+ });
+ });
- const bootstrapForNewSnippet = () => {
- createComponent();
- };
+ describe('with loaded blob', () => {
+ beforeEach(() => {
+ createComponent({ blob: TEST_BLOB_LOADED });
+ });
- it('fetches blob content with the additional query', () => {
- bootstrapForExistingSnippet();
+ it('matches snapshot', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
- return waitForPromises().then(() => {
- expect(joinPaths).toHaveBeenCalledWith('foo/', rawPathMock);
- expect(findComponent(BlobHeaderEdit).props('value')).toBe(pathMock);
- expect(findComponent(BlobContentEdit).props('value')).toBe(contentMock);
- });
- });
+ it('does not make API request', () => {
+ expect(axiosMock.history.get).toHaveLength(0);
+ });
+ });
- it('flashes the error message if fetching content fails', () => {
- bootstrapForExistingSnippet(500);
+ describe.each`
+ props | showLoading | showContent
+ ${{ blob: TEST_BLOB, canDelete: true, showDelete: true }} | ${true} | ${false}
+ ${{ blob: TEST_BLOB, canDelete: false, showDelete: false }} | ${true} | ${false}
+ ${{ blob: TEST_BLOB_LOADED }} | ${false} | ${true}
+ `('with $props', ({ props, showLoading, showContent }) => {
+ beforeEach(() => {
+ createComponent(props);
+ });
- return waitForPromises().then(() => {
- expect(flashSpy).toHaveBeenCalled();
- expect(findComponent(BlobContentEdit).props('value')).toBe('');
- });
+ it('shows blob header', () => {
+ const { canDelete = true, showDelete = false } = props;
+
+ expect(findHeader().props()).toMatchObject({
+ canDelete,
+ showDelete,
});
+ });
- it('does not fetch content for new snippet', () => {
- bootstrapForNewSnippet();
+ it(`handles loading icon (show=${showLoading})`, () => {
+ expect(findLoadingIcon().exists()).toBe(showLoading);
+ });
- return waitForPromises().then(() => {
- // we keep using waitForPromises to make sure we do not run failed test
- expect(findComponent(BlobHeaderEdit).props('value')).toBe('');
- expect(findComponent(BlobContentEdit).props('value')).toBe('');
- expect(joinPaths).not.toHaveBeenCalled();
+ it(`handles content (show=${showContent})`, () => {
+ expect(findContent().exists()).toBe(showContent);
+
+ if (showContent) {
+ expect(findContent().props()).toEqual({
+ value: TEST_BLOB_LOADED.content,
+ fileGlobalId: TEST_BLOB_LOADED.id,
+ fileName: TEST_BLOB_LOADED.path,
});
- });
+ }
});
});
});
diff --git a/spec/frontend/snippets/components/snippet_blob_view_spec.js b/spec/frontend/snippets/components/snippet_blob_view_spec.js
index c8f1c8fc8a9..9c4b2734a3f 100644
--- a/spec/frontend/snippets/components/snippet_blob_view_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_view_spec.js
@@ -1,7 +1,14 @@
+import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
+import {
+ Blob as BlobMock,
+ SimpleViewerMock,
+ RichViewerMock,
+ RichBlobContentMock,
+ SimpleBlobContentMock,
+} from 'jest/blob/components/mock_data';
import SnippetBlobView from '~/snippets/components/snippet_blob_view.vue';
import BlobHeader from '~/blob/components/blob_header.vue';
-import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
import BlobContent from '~/blob/components/blob_content.vue';
import {
BLOB_RENDER_EVENT_LOAD,
@@ -9,13 +16,7 @@ import {
BLOB_RENDER_ERRORS,
} from '~/blob/components/constants';
import { RichViewer, SimpleViewer } from '~/vue_shared/components/blob_viewers';
-import {
- SNIPPET_VISIBILITY_PRIVATE,
- SNIPPET_VISIBILITY_INTERNAL,
- SNIPPET_VISIBILITY_PUBLIC,
-} from '~/snippets/constants';
-
-import { Blob as BlobMock, SimpleViewerMock, RichViewerMock } from 'jest/blob/components/mock_data';
+import { SNIPPET_VISIBILITY_PUBLIC } from '~/snippets/constants';
describe('Blob Embeddable', () => {
let wrapper;
@@ -72,18 +73,6 @@ describe('Blob Embeddable', () => {
expect(wrapper.find(BlobContent).exists()).toBe(true);
});
- it.each([SNIPPET_VISIBILITY_INTERNAL, SNIPPET_VISIBILITY_PRIVATE, 'foo'])(
- 'does not render blob-embeddable by default',
- visibilityLevel => {
- createComponent({
- snippetProps: {
- visibilityLevel,
- },
- });
- expect(wrapper.find(BlobEmbeddable).exists()).toBe(false);
- },
- );
-
it('sets simple viewer correctly', () => {
createComponent();
expect(wrapper.find(SimpleViewer).exists()).toBe(true);
@@ -128,6 +117,59 @@ describe('Blob Embeddable', () => {
expect(wrapper.find(BlobHeader).props('hasRenderError')).toBe(true);
});
+  describe('blob content in multi-file scenario', () => {
+ const SimpleBlobContentMock2 = {
+ ...SimpleBlobContentMock,
+ plainData: 'Another Plain Foo',
+ };
+ const RichBlobContentMock2 = {
+      ...RichBlobContentMock,
+ richData: 'Another Rich Foo',
+ };
+
+ it.each`
+ snippetBlobs | description | currentBlob | expectedContent
+ ${[SimpleBlobContentMock]} | ${'one existing textual blob'} | ${SimpleBlobContentMock} | ${SimpleBlobContentMock.plainData}
+ ${[RichBlobContentMock]} | ${'one existing rich blob'} | ${RichBlobContentMock} | ${RichBlobContentMock.richData}
+ ${[SimpleBlobContentMock, RichBlobContentMock]} | ${'mixed blobs with current textual blob'} | ${SimpleBlobContentMock} | ${SimpleBlobContentMock.plainData}
+ ${[SimpleBlobContentMock, RichBlobContentMock]} | ${'mixed blobs with current rich blob'} | ${RichBlobContentMock} | ${RichBlobContentMock.richData}
+ ${[SimpleBlobContentMock, SimpleBlobContentMock2]} | ${'textual blobs with current textual blob'} | ${SimpleBlobContentMock} | ${SimpleBlobContentMock.plainData}
+ ${[RichBlobContentMock, RichBlobContentMock2]} | ${'rich blobs with current rich blob'} | ${RichBlobContentMock} | ${RichBlobContentMock.richData}
+ `(
+ 'renders correct content for $description',
+ async ({ snippetBlobs, currentBlob, expectedContent }) => {
+ const apolloData = {
+ snippets: {
+ edges: [
+ {
+ node: {
+ blobs: snippetBlobs,
+ },
+ },
+ ],
+ },
+ };
+ createComponent({
+ blob: {
+ ...BlobMock,
+ path: currentBlob.path,
+ },
+ });
+
+ // mimic apollo's update
+ wrapper.setData({
+ blobContent: wrapper.vm.onContentUpdate(apolloData),
+ });
+
+ await nextTick();
+
+ const findContent = () => wrapper.find(BlobContent);
+
+ expect(findContent().props('content')).toBe(expectedContent);
+ },
+ );
+ });
+
describe('URLS with hash', () => {
beforeEach(() => {
window.location.hash = '#LC2';
diff --git a/spec/frontend/snippets/components/snippet_description_edit_spec.js b/spec/frontend/snippets/components/snippet_description_edit_spec.js
index 816ab4e48de..ff75515e71a 100644
--- a/spec/frontend/snippets/components/snippet_description_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_description_edit_spec.js
@@ -1,6 +1,6 @@
+import { shallowMount } from '@vue/test-utils';
import SnippetDescriptionEdit from '~/snippets/components/snippet_description_edit.vue';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
-import { shallowMount } from '@vue/test-utils';
describe('Snippet Description Edit component', () => {
let wrapper;
diff --git a/spec/frontend/snippets/components/snippet_description_view_spec.js b/spec/frontend/snippets/components/snippet_description_view_spec.js
index 46467ef311e..14f116f2aaf 100644
--- a/spec/frontend/snippets/components/snippet_description_view_spec.js
+++ b/spec/frontend/snippets/components/snippet_description_view_spec.js
@@ -1,5 +1,5 @@
-import SnippetDescription from '~/snippets/components/snippet_description_view.vue';
import { shallowMount } from '@vue/test-utils';
+import SnippetDescription from '~/snippets/components/snippet_description_view.vue';
describe('Snippet Description component', () => {
let wrapper;
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 0825da92118..da8cb2e6a8d 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -1,46 +1,19 @@
-import SnippetHeader from '~/snippets/components/snippet_header.vue';
-import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
import { ApolloMutation } from 'vue-apollo';
import { GlButton, GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { Blob, BinaryBlob } from 'jest/blob/components/mock_data';
+import waitForPromises from 'helpers/wait_for_promises';
+import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
+import SnippetHeader from '~/snippets/components/snippet_header.vue';
describe('Snippet header component', () => {
let wrapper;
- const snippet = {
- id: 'gid://gitlab/PersonalSnippet/50',
- title: 'The property of Thor',
- visibilityLevel: 'private',
- webUrl: 'http://personal.dev.null/42',
- userPermissions: {
- adminSnippet: true,
- updateSnippet: true,
- reportSnippet: false,
- },
- project: null,
- author: {
- name: 'Thor Odinson',
- },
- blobs: [Blob],
- };
- const mutationVariables = {
- mutation: DeleteSnippetMutation,
- variables: {
- id: snippet.id,
- },
- };
- const errorMsg = 'Foo bar';
- const err = { message: errorMsg };
-
- const resolveMutate = jest.fn(() =>
- Promise.resolve({ data: { destroySnippet: { errors: [] } } }),
- );
- const rejectMutation = jest.fn(() => Promise.reject(err));
-
- const mutationTypes = {
- RESOLVE: resolveMutate,
- REJECT: rejectMutation,
- };
+ let snippet;
+ let mutationTypes;
+ let mutationVariables;
+
+ let errorMsg;
+ let err;
function createComponent({
loading = false,
@@ -63,7 +36,7 @@ describe('Snippet header component', () => {
mutate: mutationRes,
};
- wrapper = shallowMount(SnippetHeader, {
+ wrapper = mount(SnippetHeader, {
mocks: { $apollo },
propsData: {
snippet: {
@@ -76,6 +49,41 @@ describe('Snippet header component', () => {
});
}
+ beforeEach(() => {
+ snippet = {
+ id: 'gid://gitlab/PersonalSnippet/50',
+ title: 'The property of Thor',
+ visibilityLevel: 'private',
+ webUrl: 'http://personal.dev.null/42',
+ userPermissions: {
+ adminSnippet: true,
+ updateSnippet: true,
+ reportSnippet: false,
+ },
+ project: null,
+ author: {
+ name: 'Thor Odinson',
+ },
+ blobs: [Blob],
+ createdAt: new Date(Date.now() - 32 * 24 * 3600 * 1000).toISOString(),
+ };
+
+ mutationVariables = {
+ mutation: DeleteSnippetMutation,
+ variables: {
+ id: snippet.id,
+ },
+ };
+
+ errorMsg = 'Foo bar';
+ err = { message: errorMsg };
+
+ mutationTypes = {
+ RESOLVE: jest.fn(() => Promise.resolve({ data: { destroySnippet: { errors: [] } } })),
+ REJECT: jest.fn(() => Promise.reject(err)),
+ };
+ });
+
afterEach(() => {
wrapper.destroy();
});
@@ -85,6 +93,23 @@ describe('Snippet header component', () => {
expect(wrapper.find('.detail-page-header').exists()).toBe(true);
});
+ it('renders a message showing snippet creation date and author', () => {
+ createComponent();
+
+ const text = wrapper.find('[data-testid="authored-message"]').text();
+ expect(text).toContain('Authored 1 month ago by');
+ expect(text).toContain('Thor Odinson');
+ });
+
+ it('renders a message showing only snippet creation date if author is null', () => {
+ snippet.author = null;
+
+ createComponent();
+
+ const text = wrapper.find('[data-testid="authored-message"]').text();
+ expect(text).toBe('Authored 1 month ago');
+ });
+
it('renders action buttons based on permissions', () => {
createComponent({
permissions: {
@@ -163,14 +188,15 @@ describe('Snippet header component', () => {
expect(mutationTypes.RESOLVE).toHaveBeenCalledWith(mutationVariables);
});
- it('sets error message if mutation fails', () => {
+ it('sets error message if mutation fails', async () => {
createComponent({ mutationRes: mutationTypes.REJECT });
expect(Boolean(wrapper.vm.errorMessage)).toBe(false);
wrapper.vm.deleteSnippet();
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.errorMessage).toEqual(errorMsg);
- });
+
+ await waitForPromises();
+
+ expect(wrapper.vm.errorMessage).toEqual(errorMsg);
});
describe('in case of successful mutation, closes modal and redirects to correct listing', () => {
@@ -199,7 +225,7 @@ describe('Snippet header component', () => {
},
}).then(() => {
expect(wrapper.vm.closeDeleteModal).toHaveBeenCalled();
- expect(window.location.pathname).toBe(`${fullPath}/snippets`);
+ expect(window.location.pathname).toBe(`${fullPath}/-/snippets`);
});
});
});
diff --git a/spec/frontend/snippets/components/snippet_title_spec.js b/spec/frontend/snippets/components/snippet_title_spec.js
index 88261a75f6c..f201cfb19b7 100644
--- a/spec/frontend/snippets/components/snippet_title_spec.js
+++ b/spec/frontend/snippets/components/snippet_title_spec.js
@@ -1,7 +1,7 @@
-import SnippetTitle from '~/snippets/components/snippet_title.vue';
-import SnippetDescription from '~/snippets/components/snippet_description_view.vue';
import { GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import SnippetTitle from '~/snippets/components/snippet_title.vue';
+import SnippetDescription from '~/snippets/components/snippet_description_view.vue';
describe('Snippet header component', () => {
let wrapper;
diff --git a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
index 0bdef71bc08..a8df13787a5 100644
--- a/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
+++ b/spec/frontend/snippets/components/snippet_visibility_edit_spec.js
@@ -1,12 +1,12 @@
-import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
import { GlFormRadio, GlIcon, GlFormRadioGroup, GlLink } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
import {
SNIPPET_VISIBILITY,
SNIPPET_VISIBILITY_PRIVATE,
SNIPPET_VISIBILITY_INTERNAL,
SNIPPET_VISIBILITY_PUBLIC,
} from '~/snippets/constants';
-import { mount, shallowMount } from '@vue/test-utils';
describe('Snippet Visibility Edit component', () => {
let wrapper;
diff --git a/spec/frontend/snippets/test_utils.js b/spec/frontend/snippets/test_utils.js
new file mode 100644
index 00000000000..86262723157
--- /dev/null
+++ b/spec/frontend/snippets/test_utils.js
@@ -0,0 +1,76 @@
+import {
+ SNIPPET_BLOB_ACTION_CREATE,
+ SNIPPET_BLOB_ACTION_UPDATE,
+ SNIPPET_BLOB_ACTION_MOVE,
+ SNIPPET_BLOB_ACTION_DELETE,
+} from '~/snippets/constants';
+
+const CONTENT_1 = 'Lorem ipsum dolor\nSit amet\n\nGoodbye!\n';
+const CONTENT_2 = 'Lorem ipsum dolor sit amet.\n\nGoodbye!\n';
+
+export const testEntries = {
+ created: {
+ id: 'blob_1',
+ diff: {
+ action: SNIPPET_BLOB_ACTION_CREATE,
+ filePath: '/new/file',
+ previousPath: '/new/file',
+ content: CONTENT_1,
+ },
+ },
+ deleted: {
+ id: 'blob_2',
+ diff: {
+ action: SNIPPET_BLOB_ACTION_DELETE,
+ filePath: '/src/delete/me',
+ previousPath: '/src/delete/me',
+ content: CONTENT_1,
+ },
+ },
+ updated: {
+ id: 'blob_3',
+ origContent: CONTENT_1,
+ diff: {
+ action: SNIPPET_BLOB_ACTION_UPDATE,
+ filePath: '/lorem.md',
+ previousPath: '/lorem.md',
+ content: CONTENT_2,
+ },
+ },
+ renamed: {
+ id: 'blob_4',
+ diff: {
+ action: SNIPPET_BLOB_ACTION_MOVE,
+ filePath: '/dolar.md',
+ previousPath: '/ipsum.md',
+ content: CONTENT_1,
+ },
+ },
+ renamedAndUpdated: {
+ id: 'blob_5',
+ origContent: CONTENT_1,
+ diff: {
+ action: SNIPPET_BLOB_ACTION_MOVE,
+ filePath: '/sit.md',
+ previousPath: '/sit/amit.md',
+ content: CONTENT_2,
+ },
+ },
+};
+
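+// Derives a blob object from a test entry; pass isOrig = true to get the pre-diff state (previousPath and original content).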
+export const createBlobFromTestEntry = ({ diff, origContent }, isOrig = false) => ({
+ content: isOrig && origContent ? origContent : diff.content,
+ path: isOrig ? diff.previousPath : diff.filePath,
+});
+
+export const createBlobsFromTestEntries = (entries, isOrig = false) =>
+ entries.reduce(
+ (acc, entry) =>
+ Object.assign(acc, {
+ [entry.id]: {
+ id: entry.id,
+ ...createBlobFromTestEntry(entry, isOrig),
+ },
+ }),
+ {},
+ );
diff --git a/spec/frontend/snippets/utils/blob_spec.js b/spec/frontend/snippets/utils/blob_spec.js
new file mode 100644
index 00000000000..c20cf2e6102
--- /dev/null
+++ b/spec/frontend/snippets/utils/blob_spec.js
@@ -0,0 +1,63 @@
+import { cloneDeep } from 'lodash';
+import { decorateBlob, createBlob, diffAll } from '~/snippets/utils/blob';
+import { testEntries, createBlobsFromTestEntries } from '../test_utils';
+
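+// Stub lodash's uniqueId so the ids generated by decorateBlob/createBlob are deterministic ('blob_local_fakeUniqueId').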
+jest.mock('lodash/uniqueId', () => arg => `${arg}fakeUniqueId`);
+
+const TEST_RAW_BLOB = {
+ rawPath: '/test/blob/7/raw',
+};
+
+describe('~/snippets/utils/blob', () => {
+ describe('decorateBlob', () => {
+ it('should decorate the given object with local blob properties', () => {
+ const orig = cloneDeep(TEST_RAW_BLOB);
+
+ expect(decorateBlob(orig)).toEqual({
+ ...TEST_RAW_BLOB,
+ id: 'blob_local_fakeUniqueId',
+ isLoaded: false,
+ content: '',
+ });
+ });
+ });
+
+ describe('createBlob', () => {
+ it('should create an empty local blob', () => {
+ expect(createBlob()).toEqual({
+ id: 'blob_local_fakeUniqueId',
+ isLoaded: true,
+ content: '',
+ path: '',
+ });
+ });
+ });
+
+ describe('diffAll', () => {
+ it('should create diff from original files', () => {
+ const origBlobs = createBlobsFromTestEntries(
+ [
+ testEntries.deleted,
+ testEntries.updated,
+ testEntries.renamed,
+ testEntries.renamedAndUpdated,
+ ],
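+// How the initial blobs are expected to surface in the UI: same paths, but with empty content and isLoaded: false.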
+ true,
+ );
+ const blobs = createBlobsFromTestEntries([
+ testEntries.created,
+ testEntries.updated,
+ testEntries.renamed,
+ testEntries.renamedAndUpdated,
+ ]);
+
+ expect(diffAll(blobs, origBlobs)).toEqual([
+ testEntries.deleted.diff,
+ testEntries.created.diff,
+ testEntries.updated.diff,
+ testEntries.renamed.diff,
+ testEntries.renamedAndUpdated.diff,
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/snippets_spec.js b/spec/frontend/snippets_spec.js
index 5b391606371..6c39ff0da27 100644
--- a/spec/frontend/snippets_spec.js
+++ b/spec/frontend/snippets_spec.js
@@ -7,7 +7,7 @@ describe('Snippets', () => {
let shareBtn;
let scriptTag;
- const snippetUrl = 'http://test.host/snippets/1';
+ const snippetUrl = 'http://test.host/-/snippets/1';
beforeEach(() => {
loadHTMLFixture('snippets/show.html');
diff --git a/spec/frontend/static_site_editor/components/app_spec.js b/spec/frontend/static_site_editor/components/app_spec.js
new file mode 100644
index 00000000000..bbdffeae68f
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/app_spec.js
@@ -0,0 +1,34 @@
+import { shallowMount } from '@vue/test-utils';
+import App from '~/static_site_editor/components/app.vue';
+
+describe('static_site_editor/components/app', () => {
+ const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
+ const RouterView = {
+ template: '<div></div>',
+ };
+ let wrapper;
+
+ const buildWrapper = () => {
+ wrapper = shallowMount(App, {
+ stubs: {
+ RouterView,
+ },
+ propsData: {
+ mergeRequestsIllustrationPath,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('passes merge request illustration path to the router view component', () => {
+ buildWrapper();
+
+ expect(wrapper.find(RouterView).attributes()).toMatchObject({
+ 'merge-requests-illustration-path': mergeRequestsIllustrationPath,
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/edit_area_spec.js b/spec/frontend/static_site_editor/components/edit_area_spec.js
index 11c5abf1b08..f4be911171e 100644
--- a/spec/frontend/static_site_editor/components/edit_area_spec.js
+++ b/spec/frontend/static_site_editor/components/edit_area_spec.js
@@ -15,8 +15,11 @@ import {
returnUrl,
} from '../mock_data';
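+// Mock the formatter to append ' format-pass' so the specs can assert how many times content was run through it.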
+jest.mock('~/static_site_editor/services/formatter', () => jest.fn(str => `${str} format-pass`));
+
describe('~/static_site_editor/components/edit_area.vue', () => {
let wrapper;
+ const formattedBody = `${body} format-pass`;
const savingChanges = true;
const newBody = `new ${body}`;
@@ -50,9 +53,9 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
expect(findEditHeader().props('title')).toBe(title);
});
- it('renders rich content editor', () => {
+ it('renders rich content editor with a format pass', () => {
expect(findRichContentEditor().exists()).toBe(true);
- expect(findRichContentEditor().props('content')).toBe(body);
+ expect(findRichContentEditor().props('content')).toBe(formattedBody);
});
it('renders publish toolbar', () => {
@@ -94,7 +97,7 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
});
it('sets publish toolbar as not saveable when content changes are rollback', () => {
- findRichContentEditor().vm.$emit('input', body);
+ findRichContentEditor().vm.$emit('input', formattedBody);
return wrapper.vm.$nextTick().then(() => {
expect(findPublishToolbar().props('saveable')).toBe(false);
@@ -103,31 +106,53 @@ describe('~/static_site_editor/components/edit_area.vue', () => {
});
describe('when the mode changes', () => {
+ let resetInitialValue;
+
const setInitialMode = mode => {
wrapper.setData({ editorMode: mode });
};
+ const buildResetInitialValue = () => {
+ resetInitialValue = jest.fn();
+ findRichContentEditor().setMethods({ resetInitialValue });
+ };
+
afterEach(() => {
setInitialMode(EDITOR_TYPES.wysiwyg);
+ resetInitialValue = null;
});
it.each`
initialMode | targetMode | resetValue
- ${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown} | ${content}
- ${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg} | ${body}
+ ${EDITOR_TYPES.wysiwyg} | ${EDITOR_TYPES.markdown} | ${`${content} format-pass format-pass`}
+ ${EDITOR_TYPES.markdown} | ${EDITOR_TYPES.wysiwyg} | ${`${body} format-pass format-pass`}
`(
'sets editorMode from $initialMode to $targetMode',
({ initialMode, targetMode, resetValue }) => {
setInitialMode(initialMode);
+ buildResetInitialValue();
- const resetInitialValue = jest.fn();
-
- findRichContentEditor().setMethods({ resetInitialValue });
findRichContentEditor().vm.$emit('modeChange', targetMode);
expect(resetInitialValue).toHaveBeenCalledWith(resetValue);
expect(wrapper.vm.editorMode).toBe(targetMode);
},
);
+
+ it('should format the content', () => {
+ buildResetInitialValue();
+
+ findRichContentEditor().vm.$emit('modeChange', EDITOR_TYPES.markdown);
+
+ expect(resetInitialValue).toHaveBeenCalledWith(`${content} format-pass format-pass`);
+ });
+ });
+
+ describe('when content is submitted', () => {
+ it('should format the content', () => {
+ findPublishToolbar().vm.$emit('submit', content);
+
+ expect(wrapper.emitted('submit')[0][0].content).toBe(`${content} format-pass format-pass`);
+ });
});
});
diff --git a/spec/frontend/static_site_editor/components/saved_changes_message_spec.js b/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
deleted file mode 100644
index a63c3a83395..00000000000
--- a/spec/frontend/static_site_editor/components/saved_changes_message_spec.js
+++ /dev/null
@@ -1,56 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import SavedChangesMessage from '~/static_site_editor/components/saved_changes_message.vue';
-
-import { returnUrl, savedContentMeta } from '../mock_data';
-
-describe('~/static_site_editor/components/saved_changes_message.vue', () => {
- let wrapper;
- const { branch, commit, mergeRequest } = savedContentMeta;
- const props = {
- branch,
- commit,
- mergeRequest,
- returnUrl,
- };
- const findReturnToSiteButton = () => wrapper.find({ ref: 'returnToSiteButton' });
- const findMergeRequestButton = () => wrapper.find({ ref: 'mergeRequestButton' });
- const findBranchLink = () => wrapper.find({ ref: 'branchLink' });
- const findCommitLink = () => wrapper.find({ ref: 'commitLink' });
- const findMergeRequestLink = () => wrapper.find({ ref: 'mergeRequestLink' });
-
- beforeEach(() => {
- wrapper = shallowMount(SavedChangesMessage, {
- propsData: props,
- });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it.each`
- text | findEl | url
- ${'Return to site'} | ${findReturnToSiteButton} | ${props.returnUrl}
- ${'View merge request'} | ${findMergeRequestButton} | ${props.mergeRequest.url}
- `('renders "$text" button link', ({ text, findEl, url }) => {
- const btn = findEl();
-
- expect(btn.exists()).toBe(true);
- expect(btn.text()).toBe(text);
- expect(btn.attributes('href')).toBe(url);
- });
-
- it.each`
- desc | findEl | prop
- ${'branch'} | ${findBranchLink} | ${props.branch}
- ${'commit'} | ${findCommitLink} | ${props.commit}
- ${'merge request'} | ${findMergeRequestLink} | ${props.mergeRequest}
- `('renders $desc link', ({ findEl, prop }) => {
- const el = findEl();
-
- expect(el.exists()).toBe(true);
- expect(el.text()).toBe(prop.label);
- expect(el.attributes('href')).toBe(prop.url);
- });
-});
diff --git a/spec/frontend/static_site_editor/pages/home_spec.js b/spec/frontend/static_site_editor/pages/home_spec.js
index d3ee70785d1..c5473596df8 100644
--- a/spec/frontend/static_site_editor/pages/home_spec.js
+++ b/spec/frontend/static_site_editor/pages/home_spec.js
@@ -1,5 +1,6 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Home from '~/static_site_editor/pages/home.vue';
import SkeletonLoader from '~/static_site_editor/components/skeleton_loader.vue';
import EditArea from '~/static_site_editor/components/edit_area.vue';
@@ -7,7 +8,6 @@ import InvalidContentMessage from '~/static_site_editor/components/invalid_conte
import SubmitChangesError from '~/static_site_editor/components/submit_changes_error.vue';
import submitContentChangesMutation from '~/static_site_editor/graphql/mutations/submit_content_changes.mutation.graphql';
import { SUCCESS_ROUTE } from '~/static_site_editor/router/constants';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { TRACKING_ACTION_INITIALIZE_EDITOR } from '~/static_site_editor/constants';
import {
diff --git a/spec/frontend/static_site_editor/pages/success_spec.js b/spec/frontend/static_site_editor/pages/success_spec.js
index d62b67bfa83..3e19e2413e7 100644
--- a/spec/frontend/static_site_editor/pages/success_spec.js
+++ b/spec/frontend/static_site_editor/pages/success_spec.js
@@ -1,17 +1,12 @@
-import Vuex from 'vuex';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import { GlEmptyState, GlButton } from '@gitlab/ui';
import Success from '~/static_site_editor/pages/success.vue';
-import SavedChangesMessage from '~/static_site_editor/components/saved_changes_message.vue';
-import { savedContentMeta, returnUrl } from '../mock_data';
+import { savedContentMeta, returnUrl, sourcePath } from '../mock_data';
import { HOME_ROUTE } from '~/static_site_editor/router/constants';
-const localVue = createLocalVue();
-
-localVue.use(Vuex);
-
describe('static_site_editor/pages/success', () => {
+ const mergeRequestsIllustrationPath = 'illustrations/merge_requests.svg';
let wrapper;
- let store;
let router;
const buildRouter = () => {
@@ -22,16 +17,22 @@ describe('static_site_editor/pages/success', () => {
const buildWrapper = (data = {}) => {
wrapper = shallowMount(Success, {
- localVue,
- store,
mocks: {
$router: router,
},
+ stubs: {
+ GlEmptyState,
+ GlButton,
+ },
+ propsData: {
+ mergeRequestsIllustrationPath,
+ },
data() {
return {
savedContentMeta,
appData: {
returnUrl,
+ sourcePath,
},
...data,
};
@@ -39,7 +40,8 @@ describe('static_site_editor/pages/success', () => {
});
};
- const findSavedChangesMessage = () => wrapper.find(SavedChangesMessage);
+ const findEmptyState = () => wrapper.find(GlEmptyState);
+ const findReturnUrlButton = () => wrapper.find(GlButton);
beforeEach(() => {
buildRouter();
@@ -50,29 +52,50 @@ describe('static_site_editor/pages/success', () => {
wrapper = null;
});
- it('renders saved changes message', () => {
+ it('renders empty state with a link to the created merge request', () => {
+ buildWrapper();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(findEmptyState().props()).toMatchObject({
+ primaryButtonText: 'View merge request',
+ primaryButtonLink: savedContentMeta.mergeRequest.url,
+ title: 'Your merge request has been created',
+ svgPath: mergeRequestsIllustrationPath,
+ });
+ });
+
+ it('displays merge request instructions in the empty state', () => {
buildWrapper();
- expect(findSavedChangesMessage().exists()).toBe(true);
+ expect(findEmptyState().text()).toContain(
+ 'To see your changes live you will need to do the following things:',
+ );
+ expect(findEmptyState().text()).toContain('1. Add a clear title to describe the change.');
+ expect(findEmptyState().text()).toContain(
+ '2. Add a description to explain why the change is being made.',
+ );
+ expect(findEmptyState().text()).toContain(
+ '3. Assign a person to review and accept the merge request.',
+ );
});
- it('passes returnUrl to the saved changes message', () => {
+ it('displays return to site button', () => {
buildWrapper();
- expect(findSavedChangesMessage().props('returnUrl')).toBe(returnUrl);
+ expect(findReturnUrlButton().text()).toBe('Return to site');
+ expect(findReturnUrlButton().attributes().href).toBe(returnUrl);
});
- it('passes saved content metadata to the saved changes message', () => {
+ it('displays source path', () => {
buildWrapper();
- expect(findSavedChangesMessage().props('branch')).toBe(savedContentMeta.branch);
- expect(findSavedChangesMessage().props('commit')).toBe(savedContentMeta.commit);
- expect(findSavedChangesMessage().props('mergeRequest')).toBe(savedContentMeta.mergeRequest);
+ expect(wrapper.text()).toContain(`Update ${sourcePath} file`);
});
it('redirects to the HOME route when content has not been submitted', () => {
buildWrapper({ savedContentMeta: null });
expect(router.push).toHaveBeenCalledWith(HOME_ROUTE);
+ expect(wrapper.html()).toBe('');
});
});
diff --git a/spec/frontend/static_site_editor/services/formatter_spec.js b/spec/frontend/static_site_editor/services/formatter_spec.js
new file mode 100644
index 00000000000..b7600798db9
--- /dev/null
+++ b/spec/frontend/static_site_editor/services/formatter_spec.js
@@ -0,0 +1,26 @@
+import formatter from '~/static_site_editor/services/formatter';
+
+describe('formatter', () => {
+ const source = `Some text
+<br>
+
+And some more text
+
+
+<br>
+
+
+And even more text`;
+ const sourceWithoutBrTags = `Some text
+
+And some more text
+
+
+
+
+And even more text`;
+
+ it('removes extraneous <br> tags', () => {
+ expect(formatter(source)).toMatch(sourceWithoutBrTags);
+ });
+});
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
index a9169eb3e16..645ccedf7e7 100644
--- a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -1,6 +1,6 @@
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import Api from '~/api';
import { convertObjectPropsToSnakeCase } from '~/lib/utils/common_utils';
-import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import {
DEFAULT_TARGET_BRANCH,
diff --git a/spec/frontend/static_site_editor/services/templater_spec.js b/spec/frontend/static_site_editor/services/templater_spec.js
new file mode 100644
index 00000000000..1e7ae872b7e
--- /dev/null
+++ b/spec/frontend/static_site_editor/services/templater_spec.js
@@ -0,0 +1,104 @@
+/* eslint-disable no-useless-escape */
+import templater from '~/static_site_editor/services/templater';
+
+describe('templater', () => {
+ const source = `Below this line is a simple ERB (single-line erb block) example.
+
+<% some erb code %>
+
+Below this line is a complex ERB (multi-line erb block) example.
+
+<% if apptype.maturity && (apptype.maturity != "planned") %>
+ <% maturity = "This application type is at the \"#{apptype.maturity}\" level of maturity." %>
+<% end %>
+
+Below this line is a non-erb (single-line HTML) markup example that also has erb.
+
+<a href="<%= compensation_roadmap.role_path %>"><%= compensation_roadmap.role_path %></a>
+
+Below this line is a non-erb (multi-line HTML block) markup example that also has erb.
+
+<ul>
+<% compensation_roadmap.recommendation.recommendations.each do |recommendation| %>
+ <li><%= recommendation %></li>
+<% end %>
+</ul>
+
+Below this line is a block of HTML.
+
+<div>
+ <h1>Heading</h1>
+ <p>Some paragraph...</p>
+</div>
+
+Below this line is a codeblock of the same HTML that should be ignored and preserved.
+
+\`\`\` html
+<div>
+ <h1>Heading</h1>
+ <p>Some paragraph...</p>
+</div>
+\`\`\`
+`;
+ const sourceTemplated = `Below this line is a simple ERB (single-line erb block) example.
+
+\`\`\` sse
+<% some erb code %>
+\`\`\`
+
+Below this line is a complex ERB (multi-line erb block) example.
+
+\`\`\` sse
+<% if apptype.maturity && (apptype.maturity != "planned") %>
+ <% maturity = "This application type is at the \"#{apptype.maturity}\" level of maturity." %>
+<% end %>
+\`\`\`
+
+Below this line is a non-erb (single-line HTML) markup example that also has erb.
+
+\`\`\` sse
+<a href="<%= compensation_roadmap.role_path %>"><%= compensation_roadmap.role_path %></a>
+\`\`\`
+
+Below this line is a non-erb (multi-line HTML block) markup example that also has erb.
+
+\`\`\` sse
+<ul>
+<% compensation_roadmap.recommendation.recommendations.each do |recommendation| %>
+ <li><%= recommendation %></li>
+<% end %>
+</ul>
+\`\`\`
+
+Below this line is a block of HTML.
+
+\`\`\` sse
+<div>
+ <h1>Heading</h1>
+ <p>Some paragraph...</p>
+</div>
+\`\`\`
+
+Below this line is a codeblock of the same HTML that should be ignored and preserved.
+
+\`\`\` html
+<div>
+ <h1>Heading</h1>
+ <p>Some paragraph...</p>
+</div>
+\`\`\`
+`;
+
+ it.each`
+ fn | initial | target
+ ${'wrap'} | ${source} | ${sourceTemplated}
+ ${'wrap'} | ${sourceTemplated} | ${sourceTemplated}
+ ${'unwrap'} | ${sourceTemplated} | ${source}
+ ${'unwrap'} | ${source} | ${source}
+ `(
+      'wraps $initial in templated sse codeblocks if $fn is wrap, unwraps otherwise',
+ ({ fn, initial, target }) => {
+ expect(templater[fn](initial)).toMatch(target);
+ },
+ );
+});
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index 49eae715a45..544c19da57b 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -24,7 +24,7 @@ afterEach(() =>
}),
);
-initializeTestTimeout(process.env.CI ? 5000 : 500);
+initializeTestTimeout(process.env.CI ? 6000 : 500);
Vue.config.devtools = false;
Vue.config.productionTip = false;
diff --git a/spec/frontend/vue_alerts_spec.js b/spec/frontend/vue_alerts_spec.js
index b2ee6f895a8..b52737e6106 100644
--- a/spec/frontend/vue_alerts_spec.js
+++ b/spec/frontend/vue_alerts_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
-import initVueAlerts from '~/vue_alerts';
import { setHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
+import initVueAlerts from '~/vue_alerts';
describe('VueAlerts', () => {
const alerts = [
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
index e39f66d3f30..65ca3639dcc 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
@@ -3,7 +3,7 @@ import { GlButton } from '@gitlab/ui';
import Approvals from '~/vue_merge_request_widget/components/approvals/approvals.vue';
import ApprovalsSummary from '~/vue_merge_request_widget/components/approvals/approvals_summary.vue';
import ApprovalsSummaryOptional from '~/vue_merge_request_widget/components/approvals/approvals_summary_optional.vue';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import {
FETCH_LOADING,
FETCH_ERROR,
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js
index 77fad7f51ab..d9a5230f55f 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js
@@ -1,9 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import { GlLink } from '@gitlab/ui';
-import {
- OPTIONAL,
- OPTIONAL_CAN_APPROVE,
-} from '~/vue_merge_request_widget/components/approvals/messages';
import ApprovalsSummaryOptional from '~/vue_merge_request_widget/components/approvals/approvals_summary_optional.vue';
const TEST_HELP_PATH = 'help/path';
@@ -29,10 +25,6 @@ describe('MRWidget approvals summary optional', () => {
createComponent({ canApprove: true, helpPath: TEST_HELP_PATH });
});
- it('shows optional can approve message', () => {
- expect(wrapper.text()).toEqual(OPTIONAL_CAN_APPROVE);
- });
-
it('shows help link', () => {
const link = findHelpLink();
@@ -46,10 +38,6 @@ describe('MRWidget approvals summary optional', () => {
createComponent({ canApprove: false, helpPath: TEST_HELP_PATH });
});
- it('shows optional message', () => {
- expect(wrapper.text()).toEqual(OPTIONAL);
- });
-
it('does not show help link', () => {
expect(findHelpLink().exists()).toBe(false);
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
index 5f3a8654990..d67f1adadf2 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
@@ -1,9 +1,9 @@
import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
import MrWidgetPipelineContainer from '~/vue_merge_request_widget/components/mr_widget_pipeline_container.vue';
import MrWidgetPipeline from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
import ArtifactsApp from '~/vue_merge_request_widget/components/artifacts_list_app.vue';
import { mockStore } from '../mock_data';
-import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
describe('MrWidgetPipelineContainer', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
index d6c996f7501..8fcc982ac99 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
@@ -1,119 +1,156 @@
-import { mount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import { GlLink, GlSprintf } from '@gitlab/ui';
+import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
+import MockAdapter from 'axios-mock-adapter';
import suggestPipelineComponent from '~/vue_merge_request_widget/components/mr_widget_suggest_pipeline.vue';
import MrWidgetIcon from '~/vue_merge_request_widget/components/mr_widget_icon.vue';
-import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
-import { popoverProps, iconName } from './pipeline_tour_mock_data';
+import dismissibleContainer from '~/vue_shared/components/dismissible_container.vue';
+import { suggestProps, iconName } from './pipeline_tour_mock_data';
+import axios from '~/lib/utils/axios_utils';
+import {
+ SP_TRACK_LABEL,
+ SP_LINK_TRACK_EVENT,
+ SP_SHOW_TRACK_EVENT,
+ SP_LINK_TRACK_VALUE,
+ SP_SHOW_TRACK_VALUE,
+ SP_HELP_URL,
+} from '~/vue_merge_request_widget/constants';
describe('MRWidgetSuggestPipeline', () => {
- let wrapper;
- let trackingSpy;
-
- const mockTrackingOnWrapper = () => {
- unmockTracking();
- trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
- };
-
- beforeEach(() => {
- document.body.dataset.page = 'projects:merge_requests:show';
- trackingSpy = mockTracking('_category_', undefined, jest.spyOn);
-
- wrapper = mount(suggestPipelineComponent, {
- propsData: popoverProps,
- stubs: {
- GlSprintf,
- },
+ describe('template', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
});
- });
- afterEach(() => {
- wrapper.destroy();
- unmockTracking();
- });
+ describe('core functionality', () => {
+ const findOkBtn = () => wrapper.find('[data-testid="ok"]');
+ let trackingSpy;
+ let mockAxios;
+
+ const mockTrackingOnWrapper = () => {
+ unmockTracking();
+ trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
+ };
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ document.body.dataset.page = 'projects:merge_requests:show';
+ trackingSpy = mockTracking('_category_', undefined, jest.spyOn);
+
+ wrapper = mount(suggestPipelineComponent, {
+ propsData: suggestProps,
+ stubs: {
+ GlSprintf,
+ },
+ });
+ });
- describe('template', () => {
- const findOkBtn = () => wrapper.find('[data-testid="ok"]');
+ afterEach(() => {
+ unmockTracking();
+ mockAxios.restore();
+ });
- it('renders add pipeline file link', () => {
- const link = wrapper.find(GlLink);
+ it('renders add pipeline file link', () => {
+ const link = wrapper.find(GlLink);
- expect(link.exists()).toBe(true);
- expect(link.attributes().href).toBe(popoverProps.pipelinePath);
- });
+ expect(link.exists()).toBe(true);
+ expect(link.attributes().href).toBe(suggestProps.pipelinePath);
+ });
- it('renders the expected text', () => {
- const messageText = /\s*No pipeline\s*Add the .gitlab-ci.yml file\s*to create one./;
+ it('renders the expected text', () => {
+ const messageText = /\s*No pipeline\s*Add the .gitlab-ci.yml file\s*to create one./;
- expect(wrapper.text()).toMatch(messageText);
- });
+ expect(wrapper.text()).toMatch(messageText);
+ });
- it('renders widget icon', () => {
- const icon = wrapper.find(MrWidgetIcon);
+ it('renders widget icon', () => {
+ const icon = wrapper.find(MrWidgetIcon);
- expect(icon.exists()).toBe(true);
- expect(icon.props()).toEqual(
- expect.objectContaining({
- name: iconName,
- }),
- );
- });
+ expect(icon.exists()).toBe(true);
+ expect(icon.props()).toEqual(
+ expect.objectContaining({
+ name: iconName,
+ }),
+ );
+ });
- it('renders the show me how button', () => {
- const button = findOkBtn();
+ it('renders the show me how button', () => {
+ const button = findOkBtn();
- expect(button.exists()).toBe(true);
- expect(button.classes('btn-info')).toEqual(true);
- expect(button.attributes('href')).toBe(popoverProps.pipelinePath);
- });
+ expect(button.exists()).toBe(true);
+ expect(button.classes('btn-info')).toEqual(true);
+ expect(button.attributes('href')).toBe(suggestProps.pipelinePath);
+ });
- it('renders the help link', () => {
- const link = wrapper.find('[data-testid="help"]');
+ it('renders the help link', () => {
+ const link = wrapper.find('[data-testid="help"]');
- expect(link.exists()).toBe(true);
- expect(link.attributes('href')).toBe(wrapper.vm.$options.helpURL);
- });
+ expect(link.exists()).toBe(true);
+ expect(link.attributes('href')).toBe(SP_HELP_URL);
+ });
- it('renders the empty pipelines image', () => {
- const image = wrapper.find('[data-testid="pipeline-image"]');
+ it('renders the empty pipelines image', () => {
+ const image = wrapper.find('[data-testid="pipeline-image"]');
- expect(image.exists()).toBe(true);
- expect(image.attributes().src).toBe(popoverProps.pipelineSvgPath);
- });
+ expect(image.exists()).toBe(true);
+ expect(image.attributes().src).toBe(suggestProps.pipelineSvgPath);
+ });
- describe('tracking', () => {
- it('send event for basic view of the suggest pipeline widget', () => {
- const expectedCategory = undefined;
- const expectedAction = undefined;
+ describe('tracking', () => {
+ it('send event for basic view of the suggest pipeline widget', () => {
+ const expectedCategory = undefined;
+ const expectedAction = undefined;
- expect(trackingSpy).toHaveBeenCalledWith(expectedCategory, expectedAction, {
- label: wrapper.vm.$options.trackLabel,
- property: popoverProps.humanAccess,
+ expect(trackingSpy).toHaveBeenCalledWith(expectedCategory, expectedAction, {
+ label: SP_TRACK_LABEL,
+ property: suggestProps.humanAccess,
+ });
});
- });
- it('send an event when add pipeline link is clicked', () => {
- mockTrackingOnWrapper();
- const link = wrapper.find('[data-testid="add-pipeline-link"]');
- triggerEvent(link.element);
+ it('send an event when add pipeline link is clicked', () => {
+ mockTrackingOnWrapper();
+ const link = wrapper.find('[data-testid="add-pipeline-link"]');
+ triggerEvent(link.element);
- expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_link', {
- label: wrapper.vm.$options.trackLabel,
- property: popoverProps.humanAccess,
- value: '30',
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', SP_LINK_TRACK_EVENT, {
+ label: SP_TRACK_LABEL,
+ property: suggestProps.humanAccess,
+ value: SP_LINK_TRACK_VALUE.toString(),
+ });
});
- });
- it('send an event when ok button is clicked', () => {
- mockTrackingOnWrapper();
- const okBtn = findOkBtn();
- triggerEvent(okBtn.element);
+ it('send an event when ok button is clicked', () => {
+ mockTrackingOnWrapper();
+ const okBtn = findOkBtn();
+ triggerEvent(okBtn.element);
- expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
- label: wrapper.vm.$options.trackLabel,
- property: popoverProps.humanAccess,
- value: '10',
+ expect(trackingSpy).toHaveBeenCalledWith('_category_', SP_SHOW_TRACK_EVENT, {
+ label: SP_TRACK_LABEL,
+ property: suggestProps.humanAccess,
+ value: SP_SHOW_TRACK_VALUE.toString(),
+ });
});
});
});
+
+ describe('dismissible', () => {
+ const findDismissContainer = () => wrapper.find(dismissibleContainer);
+
+ beforeEach(() => {
+ wrapper = shallowMount(suggestPipelineComponent, { propsData: suggestProps });
+ });
+
+ it('renders the dismissal container', () => {
+ expect(findDismissContainer().exists()).toBe(true);
+ });
+
+ it('emits dismiss upon dismissal button click', () => {
+ findDismissContainer().vm.$emit('dismiss');
+
+ expect(wrapper.emitted().dismiss).toBeTruthy();
+ });
+ });
});
});
diff --git a/spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js b/spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js
index c749c434079..eef087d62b8 100644
--- a/spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js
+++ b/spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js
@@ -1,7 +1,9 @@
-export const popoverProps = {
+export const suggestProps = {
pipelinePath: '/foo/bar/add/pipeline/path',
pipelineSvgPath: 'assets/illustrations/something.svg',
humanAccess: 'maintainer',
+ userCalloutsPath: 'some/callout/path',
+ userCalloutFeatureId: 'suggest_pipeline',
};
export const iconName = 'status_notfound';
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
index 56832f82b05..5c7e6a87c16 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdownItem } from '@gitlab/ui';
import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue';
const commits = [
@@ -39,7 +39,7 @@ describe('Commits message dropdown component', () => {
wrapper.destroy();
});
- const findDropdownElements = () => wrapper.findAll(GlDropdownItem);
+ const findDropdownElements = () => wrapper.findAll(GlDeprecatedDropdownItem);
const findFirstDropdownElement = () => findDropdownElements().at(0);
it('should have 3 elements in dropdown list', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index d3482b457ad..c3a16a776a7 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -1,8 +1,8 @@
import $ from 'jquery';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { removeBreakLine } from 'helpers/text_helper';
-import ConflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
import { TEST_HOST } from 'helpers/test_constants';
+import ConflictsComponent from '~/vue_merge_request_widget/components/states/mr_widget_conflicts.vue';
describe('MRWidgetConflicts', () => {
let vm;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
index 1542b0939aa..4c213899dbd 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
@@ -79,7 +79,7 @@ describe('Squash before merge component', () => {
});
it(expectation, () => {
- expect(findLabel().classes('gl-text-gray-600')).toBe(isDisabled);
+ expect(findLabel().classes('gl-text-gray-400')).toBe(isDisabled);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
index 33e52f4fd36..a5531577a8c 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
@@ -1,46 +1,68 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import UnresolvedDiscussions from '~/vue_merge_request_widget/components/states/unresolved_discussions.vue';
+import { mount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
+import UnresolvedDiscussions from '~/vue_merge_request_widget/components/states/unresolved_discussions.vue';
+import notesEventHub from '~/notes/event_hub';
+
+function createComponent({ path = '' } = {}) {
+ return mount(UnresolvedDiscussions, {
+ propsData: {
+ mr: {
+ createIssueToResolveDiscussionsPath: path,
+ },
+ },
+ });
+}
describe('UnresolvedDiscussions', () => {
- const Component = Vue.extend(UnresolvedDiscussions);
- let vm;
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ });
+
+ it('triggers the correct notes event when the jump to first unresolved discussion button is clicked', () => {
+ jest.spyOn(notesEventHub, '$emit');
+
+ wrapper.find('[data-testid="jump-to-first"]').trigger('click');
+
+ expect(notesEventHub.$emit).toHaveBeenCalledWith('jumpToFirstUnresolvedDiscussion');
});
describe('with threads path', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- mr: {
- createIssueToResolveDiscussionsPath: TEST_HOST,
- },
- });
+ wrapper = createComponent({ path: TEST_HOST });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
});
it('should have correct elements', () => {
- expect(vm.$el.innerText).toContain(
- 'There are unresolved threads. Please resolve these threads',
+ expect(wrapper.element.innerText).toContain(
+ `Before this can be merged, one or more threads must be resolved.`,
);
- expect(vm.$el.innerText).toContain('Create an issue to resolve them later');
- expect(vm.$el.querySelector('.js-create-issue').getAttribute('href')).toEqual(TEST_HOST);
+ expect(wrapper.element.innerText).toContain('Jump to first unresolved thread');
+ expect(wrapper.element.innerText).toContain('Resolve all threads in new issue');
+ expect(wrapper.element.querySelector('.js-create-issue').getAttribute('href')).toEqual(
+ TEST_HOST,
+ );
});
});
describe('without threads path', () => {
- beforeEach(() => {
- vm = mountComponent(Component, { mr: {} });
- });
-
it('should not show create issue link if user cannot create issue', () => {
- expect(vm.$el.innerText).toContain(
- 'There are unresolved threads. Please resolve these threads',
+ expect(wrapper.element.innerText).toContain(
+ `Before this can be merged, one or more threads must be resolved.`,
);
- expect(vm.$el.querySelector('.js-create-issue')).toEqual(null);
+ expect(wrapper.element.innerText).toContain('Jump to first unresolved thread');
+ expect(wrapper.element.innerText).not.toContain('Resolve all threads in new issue');
+ expect(wrapper.element.querySelector('.js-create-issue')).toEqual(null);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
index 6fa555b4fc4..6ccf1e1f56b 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import WorkInProgress from '~/vue_merge_request_widget/components/states/work_in_progress.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
jest.mock('~/flash');
@@ -84,11 +84,11 @@ describe('Wip', () => {
it('should have correct elements', () => {
expect(el.classList.contains('mr-widget-body')).toBeTruthy();
- expect(el.innerText).toContain('This is a Work in Progress');
+ expect(el.innerText).toContain('This merge request is still a work in progress.');
expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy();
expect(el.querySelector('button').innerText).toContain('Merge');
expect(el.querySelector('.js-remove-wip').innerText.replace(/\s\s+/g, ' ')).toContain(
- 'Resolve WIP status',
+ 'Mark as ready',
);
});
diff --git a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
index be43f10c03e..ffcf9b1477a 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
+++ b/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
@@ -1,8 +1,8 @@
import { GlSkeletonLoading, GlSprintf } from '@gitlab/ui';
-import { invalidPlanWithName, plans, validPlanWithName } from './mock_data';
import { shallowMount } from '@vue/test-utils';
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
+import { invalidPlanWithName, plans, validPlanWithName } from './mock_data';
+import axios from '~/lib/utils/axios_utils';
import MrWidgetExpanableSection from '~/vue_merge_request_widget/components/mr_widget_expandable_section.vue';
import MrWidgetTerraformContainer from '~/vue_merge_request_widget/components/terraform/mr_widget_terraform_container.vue';
import Poll from '~/lib/utils/poll';
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
index 6449272e6ed..1711efb5512 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import { visitUrl } from '~/lib/utils/url_utility';
import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
import DeploymentActions from '~/vue_merge_request_widget/components/deployment/deployment_actions.vue';
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_mr_widget/mock_data.js
index e00456a78b5..d64a7f88b6b 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_mr_widget/mock_data.js
@@ -37,6 +37,9 @@ export default {
target_project_id: 19,
target_project_full_path: '/group2/project2',
merge_request_add_ci_config_path: '/group2/project2/new/pipeline',
+ is_dismissed_suggest_pipeline: false,
+ user_callouts_path: 'some/callout/path',
+ suggest_pipeline_feature_id: 'suggest_pipeline',
new_project_pipeline_path: '/group2/project2/pipelines/new',
metrics: {
merged_by: {
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index 93659fa54fb..0bbe040d031 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -62,6 +62,9 @@ describe('mrWidgetOptions', () => {
return axios.waitForAll();
};
+ const findSuggestPipeline = () => vm.$el.querySelector('[data-testid="mr-suggest-pipeline"]');
+ const findSuggestPipelineButton = () => findSuggestPipeline().querySelector('button');
+
describe('default', () => {
beforeEach(() => {
return createComponent();
@@ -804,42 +807,48 @@ describe('mrWidgetOptions', () => {
});
});
- it('should not suggest pipelines', () => {
- vm.mr.mergeRequestAddCiConfigPath = null;
-
- expect(vm.shouldSuggestPipelines).toBeFalsy();
+ it('should not suggest pipelines when feature flag is not present', () => {
+ expect(findSuggestPipeline()).toBeNull();
});
});
describe('given suggestPipeline feature flag is enabled', () => {
beforeEach(() => {
+ mock.onAny().reply(200);
+
// This is needed because some grandchildren Bootstrap components throw warnings
// https://gitlab.com/gitlab-org/gitlab/issues/208458
jest.spyOn(console, 'warn').mockImplementation();
gon.features = { suggestPipeline: true };
- return createComponent();
- });
- it('should suggest pipelines when none exist', () => {
- vm.mr.mergeRequestAddCiConfigPath = 'some/path';
+ createComponent();
+
vm.mr.hasCI = false;
+ });
- expect(vm.shouldSuggestPipelines).toBeTruthy();
+ it('should suggest pipelines when none exist', () => {
+ expect(findSuggestPipeline()).toEqual(expect.any(Element));
});
- it('should not suggest pipelines when they exist', () => {
- vm.mr.mergeRequestAddCiConfigPath = null;
- vm.mr.hasCI = false;
+ it.each([
+ { isDismissedSuggestPipeline: true },
+ { mergeRequestAddCiConfigPath: null },
+ { hasCI: true },
+ ])('with %s, should not suggest pipeline', async obj => {
+ Object.assign(vm.mr, obj);
+
+ await vm.$nextTick();
- expect(vm.shouldSuggestPipelines).toBeFalsy();
+ expect(findSuggestPipeline()).toBeNull();
});
- it('should not suggest pipelines hasCI is true', () => {
- vm.mr.mergeRequestAddCiConfigPath = 'some/path';
- vm.mr.hasCI = true;
+    it('should allow dismissal of the suggest pipeline message', async () => {
+ findSuggestPipelineButton().click();
+
+ await vm.$nextTick();
- expect(vm.shouldSuggestPipelines).toBeFalsy();
+ expect(findSuggestPipeline()).toBeNull();
});
});
});
diff --git a/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js b/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js
index 1cb2c6c669b..128e0f39c41 100644
--- a/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js
+++ b/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js
@@ -11,15 +11,13 @@ describe('getStateKey', () => {
hasMergeableDiscussionsState: false,
isPipelineBlocked: false,
canBeMerged: false,
+ projectArchived: false,
+ branchMissing: false,
+ commitsCount: 2,
+ hasConflicts: false,
+ workInProgress: false,
};
- const data = {
- project_archived: false,
- branch_missing: false,
- commits_count: 2,
- has_conflicts: false,
- work_in_progress: false,
- };
- const bound = getStateKey.bind(context, data);
+ const bound = getStateKey.bind(context);
expect(bound()).toEqual(null);
@@ -49,7 +47,7 @@ describe('getStateKey', () => {
expect(bound()).toEqual('unresolvedDiscussions');
- data.work_in_progress = true;
+ context.workInProgress = true;
expect(bound()).toEqual('workInProgress');
@@ -62,7 +60,7 @@ describe('getStateKey', () => {
expect(bound()).toEqual('rebase');
- data.has_conflicts = true;
+ context.hasConflicts = true;
expect(bound()).toEqual('conflicts');
@@ -70,15 +68,15 @@ describe('getStateKey', () => {
expect(bound()).toEqual('checking');
- data.commits_count = 0;
+ context.commitsCount = 0;
expect(bound()).toEqual('nothingToMerge');
- data.branch_missing = true;
+ context.branchMissing = true;
expect(bound()).toEqual('missingBranch');
- data.project_archived = true;
+ context.projectArchived = true;
expect(bound()).toEqual('archived');
});
@@ -94,15 +92,13 @@ describe('getStateKey', () => {
isPipelineBlocked: false,
canBeMerged: false,
shouldBeRebased: true,
+ projectArchived: false,
+ branchMissing: false,
+ commitsCount: 2,
+ hasConflicts: false,
+ workInProgress: false,
};
- const data = {
- project_archived: false,
- branch_missing: false,
- commits_count: 2,
- has_conflicts: false,
- work_in_progress: false,
- };
- const bound = getStateKey.bind(context, data);
+ const bound = getStateKey.bind(context);
expect(bound()).toEqual('rebase');
});
@@ -115,15 +111,11 @@ describe('getStateKey', () => {
`(
'returns $stateKey when canMerge is $canMerge and isSHAMismatch is $isSHAMismatch',
({ canMerge, isSHAMismatch, stateKey }) => {
- const bound = getStateKey.bind(
- {
- canMerge,
- isSHAMismatch,
- },
- {
- commits_count: 2,
- },
- );
+ const bound = getStateKey.bind({
+ canMerge,
+ isSHAMismatch,
+ commitsCount: 2,
+ });
expect(bound()).toEqual(stateKey);
},
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index 408f9d57147..e84eb7789d3 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -4,6 +4,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<gl-new-dropdown-stub
category="primary"
headertext=""
+ right=""
size="medium"
text="Clone"
variant="info"
@@ -38,7 +39,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
- category="tertiary"
+ category="primary"
class="d-inline-flex"
data-clipboard-text="ssh://foo.bar"
data-qa-selector="copy_ssh_url_button"
@@ -79,7 +80,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
- category="tertiary"
+ category="primary"
class="d-inline-flex"
data-clipboard-text="http://foo.bar"
data-qa-selector="copy_http_url_button"
diff --git a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
index 1f54405928b..cd4728baeaa 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/expand_button_spec.js.snap
@@ -4,20 +4,22 @@ exports[`Expand button on click when short text is provided renders button after
<span>
<button
aria-label="Click to expand text"
- class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md"
+ class="btn js-text-expander-prepend text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
style="display: none;"
type="button"
>
<!---->
<svg
- aria-hidden="true"
- class="s12 ic-ellipsis_h"
+ class="gl-icon s16"
+ data-testid="ellipsis_h-icon"
>
<use
- xlink:href="#ellipsis_h"
+ href="#ellipsis_h"
/>
</svg>
+
+ <!---->
</button>
<!---->
@@ -30,20 +32,22 @@ exports[`Expand button on click when short text is provided renders button after
<button
aria-label="Click to expand text"
- class="btn js-text-expander-append text-expander btn-blank btn-secondary btn-md"
+ class="btn js-text-expander-append text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
style=""
type="button"
>
<!---->
<svg
- aria-hidden="true"
- class="s12 ic-ellipsis_h"
+ class="gl-icon s16"
+ data-testid="ellipsis_h-icon"
>
<use
- xlink:href="#ellipsis_h"
+ href="#ellipsis_h"
/>
</svg>
+
+ <!---->
</button>
</span>
`;
@@ -52,19 +56,21 @@ exports[`Expand button when short text is provided renders button before text 1`
<span>
<button
aria-label="Click to expand text"
- class="btn js-text-expander-prepend text-expander btn-blank btn-secondary btn-md"
+ class="btn js-text-expander-prepend text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
type="button"
>
<!---->
<svg
- aria-hidden="true"
- class="s12 ic-ellipsis_h"
+ class="gl-icon s16"
+ data-testid="ellipsis_h-icon"
>
<use
- xlink:href="#ellipsis_h"
+ href="#ellipsis_h"
/>
</svg>
+
+ <!---->
</button>
<span>
@@ -77,20 +83,22 @@ exports[`Expand button when short text is provided renders button before text 1`
<button
aria-label="Click to expand text"
- class="btn js-text-expander-append text-expander btn-blank btn-secondary btn-md"
+ class="btn js-text-expander-append text-expander btn-blank btn-default btn-md btn-icon button-ellipsis-horizontal gl-button"
style="display: none;"
type="button"
>
<!---->
<svg
- aria-hidden="true"
- class="s12 ic-ellipsis_h"
+ class="gl-icon s16"
+ data-testid="ellipsis_h-icon"
>
<use
- xlink:href="#ellipsis_h"
+ href="#ellipsis_h"
/>
</svg>
+
+ <!---->
</button>
</span>
`;
diff --git a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
index 74f71c23d02..fcb9c4b8b02 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/split_button_spec.js.snap
@@ -1,13 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`SplitButton renders actionItems 1`] = `
-<gl-dropdown-stub
+<gl-deprecated-dropdown-stub
menu-class="dropdown-menu-selectable "
split="true"
text="professor"
variant="secondary"
>
- <gl-dropdown-item-stub
+ <gl-deprecated-dropdown-item-stub
active="true"
active-class="is-active"
>
@@ -18,10 +18,10 @@ exports[`SplitButton renders actionItems 1`] = `
<div>
very symphonic
</div>
- </gl-dropdown-item-stub>
+ </gl-deprecated-dropdown-item-stub>
- <gl-dropdown-divider-stub />
- <gl-dropdown-item-stub
+ <gl-deprecated-dropdown-divider-stub />
+ <gl-deprecated-dropdown-item-stub
active-class="is-active"
>
<strong>
@@ -31,8 +31,8 @@ exports[`SplitButton renders actionItems 1`] = `
<div>
warp drive
</div>
- </gl-dropdown-item-stub>
+ </gl-deprecated-dropdown-item-stub>
<!---->
-</gl-dropdown-stub>
+</gl-deprecated-dropdown-stub>
`;
diff --git a/spec/frontend/vue_shared/components/clone_dropdown_spec.js b/spec/frontend/vue_shared/components/clone_dropdown_spec.js
index 38e0cadfe83..d9829874b93 100644
--- a/spec/frontend/vue_shared/components/clone_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/clone_dropdown_spec.js
@@ -1,6 +1,6 @@
-import CloneDropdown from '~/vue_shared/components/clone_dropdown.vue';
import { shallowMount } from '@vue/test-utils';
import { GlFormInputGroup, GlNewDropdownHeader } from '@gitlab/ui';
+import CloneDropdown from '~/vue_shared/components/clone_dropdown.vue';
describe('Clone Dropdown Button', () => {
let wrapper;
diff --git a/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js b/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
index 8d3fcdd48d2..c75891c9ed3 100644
--- a/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/content_viewer/viewers/markdown_viewer_spec.js
@@ -1,8 +1,8 @@
import $ from 'jquery';
-import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter';
import { mount } from '@vue/test-utils';
import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
import MarkdownViewer from '~/vue_shared/components/content_viewer/viewers/markdown_viewer.vue';
describe('MarkdownViewer', () => {
diff --git a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
index ceea8d2fa92..223e22d650b 100644
--- a/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
+++ b/spec/frontend/vue_shared/components/date_time_picker/date_time_picker_spec.js
@@ -13,9 +13,9 @@ describe('DateTimePicker', () => {
const dropdownToggle = () => wrapper.find('.dropdown-toggle');
const dropdownMenu = () => wrapper.find('.dropdown-menu');
+ const cancelButton = () => wrapper.find('[data-testid="cancelButton"]');
const applyButtonElement = () => wrapper.find('button.btn-success').element;
const findQuickRangeItems = () => wrapper.findAll('.dropdown-item');
- const cancelButtonElement = () => wrapper.find('button.btn-secondary').element;
const createComponent = props => {
wrapper = mount(DateTimePicker, {
@@ -260,7 +260,7 @@ describe('DateTimePicker', () => {
dropdownToggle().trigger('click');
return wrapper.vm.$nextTick(() => {
- cancelButtonElement().click();
+ cancelButton().trigger('click');
return wrapper.vm.$nextTick(() => {
expect(dropdownMenu().classes('show')).toBe(false);
diff --git a/spec/frontend/vue_shared/components/dismissible_container_spec.js b/spec/frontend/vue_shared/components/dismissible_container_spec.js
new file mode 100644
index 00000000000..e49ca1e2285
--- /dev/null
+++ b/spec/frontend/vue_shared/components/dismissible_container_spec.js
@@ -0,0 +1,58 @@
+import MockAdapter from 'axios-mock-adapter';
+import { shallowMount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+import dismissibleContainer from '~/vue_shared/components/dismissible_container.vue';
+
+describe('DismissibleContainer', () => {
+ let wrapper;
+ const propsData = {
+ path: 'some/path',
+ featureId: 'some-feature-id',
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ const findBtn = () => wrapper.find('[data-testid="close"]');
+ let mockAxios;
+
+ beforeEach(() => {
+ mockAxios = new MockAdapter(axios);
+ wrapper = shallowMount(dismissibleContainer, { propsData });
+ });
+
+ afterEach(() => {
+ mockAxios.restore();
+ });
+
+ it('successfully dismisses', () => {
+ mockAxios.onPost(propsData.path).replyOnce(200);
+ const button = findBtn();
+
+ button.trigger('click');
+
+ expect(wrapper.emitted().dismiss).toBeTruthy();
+ });
+ });
+
+ describe('slots', () => {
+ const slots = {
+ title: 'Foo Title',
+ default: 'default slot',
+ };
+
+ it.each(Object.keys(slots))('renders the %s slot', slot => {
+ const slotContent = slots[slot];
+ wrapper = shallowMount(dismissibleContainer, {
+ propsData,
+ slots: {
+ [slot]: `<span>${slotContent}</span>`,
+ },
+ });
+
+ expect(wrapper.text()).toContain(slotContent);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js b/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js
new file mode 100644
index 00000000000..4c4baf23120
--- /dev/null
+++ b/spec/frontend/vue_shared/components/dismissible_feedback_alert_spec.js
@@ -0,0 +1,91 @@
+import { mount, shallowMount } from '@vue/test-utils';
+import { GlAlert, GlSprintf, GlLink } from '@gitlab/ui';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import Component from '~/vue_shared/components/dismissible_feedback_alert.vue';
+
+describe('Dismissible Feedback Alert', () => {
+ useLocalStorageSpy();
+
+ let wrapper;
+
+ const defaultProps = {
+ featureName: 'Dependency List',
+ feedbackLink: 'https://gitlab.link',
+ };
+
+ const STORAGE_DISMISSAL_KEY = 'dependency_list_feedback_dismissed';
+
+ const createComponent = ({ props, shallow } = {}) => {
+ const mountFn = shallow ? shallowMount : mount;
+
+ wrapper = mountFn(Component, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const findAlert = () => wrapper.find(GlAlert);
+ const findLink = () => wrapper.find(GlLink);
+
+ describe('with default', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('contains feature name', () => {
+ expect(findAlert().text()).toContain(defaultProps.featureName);
+ });
+
+ it('contains provided link', () => {
+ const link = findLink();
+
+ expect(link.attributes('href')).toBe(defaultProps.feedbackLink);
+ expect(link.attributes('target')).toBe('_blank');
+ });
+
+ it('should have the storage key set', () => {
+ expect(wrapper.vm.storageKey).toBe(STORAGE_DISMISSAL_KEY);
+ });
+ });
+
+ describe('dismissible', () => {
+ describe('after dismissal', () => {
+ beforeEach(() => {
+ createComponent({ shallow: false });
+ findAlert().vm.$emit('dismiss');
+ });
+
+ it('hides the alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should remember the dismissal state', () => {
+ expect(localStorage.setItem).toHaveBeenCalledWith(STORAGE_DISMISSAL_KEY, 'true');
+ });
+ });
+
+ describe('already dismissed', () => {
+ it('should not show the alert once dismissed', async () => {
+ localStorage.setItem(STORAGE_DISMISSAL_KEY, 'true');
+ createComponent({ shallow: false });
+ await wrapper.vm.$nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/file_finder/item_spec.js b/spec/frontend/vue_shared/components/file_finder/item_spec.js
index 63f2614106d..5a45a5dbba1 100644
--- a/spec/frontend/vue_shared/components/file_finder/item_spec.js
+++ b/spec/frontend/vue_shared/components/file_finder/item_spec.js
@@ -1,7 +1,7 @@
import Vue from 'vue';
import { file } from 'jest/ide/helpers';
-import ItemComponent from '~/vue_shared/components/file_finder/item.vue';
import createComponent from 'helpers/vue_mount_component_helper';
+import ItemComponent from '~/vue_shared/components/file_finder/item.vue';
describe('File finder item spec', () => {
const Component = Vue.extend(ItemComponent);
diff --git a/spec/frontend/vue_shared/components/file_icon_spec.js b/spec/frontend/vue_shared/components/file_icon_spec.js
index adf0da21f9f..e55449dc684 100644
--- a/spec/frontend/vue_shared/components/file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/file_icon_spec.js
@@ -36,6 +36,9 @@ describe('File Icon component', () => {
fileName | iconName
${'test.js'} | ${'javascript'}
${'test.png'} | ${'image'}
+ ${'test.PNG'} | ${'image'}
+ ${'.npmrc'} | ${'npm'}
+ ${'.Npmrc'} | ${'file'}
${'webpack.js'} | ${'webpack'}
`('should render a $iconName icon based on file ending', ({ fileName, iconName }) => {
createComponent({ fileName });
diff --git a/spec/frontend/vue_shared/components/file_row_spec.js b/spec/frontend/vue_shared/components/file_row_spec.js
index 46df2d2aaf1..1acd2e05464 100644
--- a/spec/frontend/vue_shared/components/file_row_spec.js
+++ b/spec/frontend/vue_shared/components/file_row_spec.js
@@ -1,8 +1,8 @@
import { file } from 'jest/ide/helpers';
-import FileRow from '~/vue_shared/components/file_row.vue';
-import FileHeader from '~/vue_shared/components/file_row_header.vue';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import FileRow from '~/vue_shared/components/file_row.vue';
+import FileHeader from '~/vue_shared/components/file_row_header.vue';
import { escapeFileUrl } from '~/lib/utils/url_utility';
describe('File row component', () => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
index 05508d14209..73dbecadd89 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import {
GlFilteredSearch,
GlButtonGroup,
@@ -16,13 +16,16 @@ import RecentSearchesService from '~/filtered_search/services/recent_searches_se
import { mockAvailableTokens, mockSortOptions, mockHistoryItems } from './mock_data';
const createComponent = ({
+ shallow = true,
namespace = 'gitlab-org/gitlab-test',
recentSearchesStorageKey = 'requirements',
tokens = mockAvailableTokens,
- sortOptions = mockSortOptions,
+ sortOptions,
searchInputPlaceholder = 'Filter requirements',
-} = {}) =>
- shallowMount(FilteredSearchBarRoot, {
+} = {}) => {
+ const mountMethod = shallow ? shallowMount : mount;
+
+ return mountMethod(FilteredSearchBarRoot, {
propsData: {
namespace,
recentSearchesStorageKey,
@@ -31,12 +34,13 @@ const createComponent = ({
searchInputPlaceholder,
},
});
+};
describe('FilteredSearchBarRoot', () => {
let wrapper;
beforeEach(() => {
- wrapper = createComponent();
+ wrapper = createComponent({ sortOptions: mockSortOptions });
});
afterEach(() => {
@@ -44,23 +48,38 @@ describe('FilteredSearchBarRoot', () => {
});
describe('data', () => {
- it('initializes `filterValue`, `selectedSortOption` and `selectedSortDirection` data props', () => {
+ it('initializes `filterValue`, `selectedSortOption` and `selectedSortDirection` data props and displays the sort dropdown', () => {
expect(wrapper.vm.filterValue).toEqual([]);
expect(wrapper.vm.selectedSortOption).toBe(mockSortOptions[0].sortDirection.descending);
expect(wrapper.vm.selectedSortDirection).toBe(SortDirection.descending);
+ expect(wrapper.contains(GlButtonGroup)).toBe(true);
+ expect(wrapper.contains(GlButton)).toBe(true);
+ expect(wrapper.contains(GlDropdown)).toBe(true);
+ expect(wrapper.contains(GlDropdownItem)).toBe(true);
+ });
+
+ it('does not initialize `selectedSortOption` and `selectedSortDirection` when `sortOptions` is not applied and hides the sort dropdown', () => {
+ const wrapperNoSort = createComponent();
+
+ expect(wrapperNoSort.vm.filterValue).toEqual([]);
+ expect(wrapperNoSort.vm.selectedSortOption).toBe(undefined);
+ expect(wrapperNoSort.contains(GlButtonGroup)).toBe(false);
+ expect(wrapperNoSort.contains(GlButton)).toBe(false);
+ expect(wrapperNoSort.contains(GlDropdown)).toBe(false);
+ expect(wrapperNoSort.contains(GlDropdownItem)).toBe(false);
});
});
describe('computed', () => {
describe('tokenSymbols', () => {
it('returns a map containing type and symbols from `tokens` prop', () => {
- expect(wrapper.vm.tokenSymbols).toEqual({ author_username: '@' });
+ expect(wrapper.vm.tokenSymbols).toEqual({ author_username: '@', label_name: '~' });
});
});
describe('tokenTitles', () => {
it('returns a map containing type and title from `tokens` prop', () => {
- expect(wrapper.vm.tokenTitles).toEqual({ author_username: 'Author' });
+ expect(wrapper.vm.tokenTitles).toEqual({ author_username: 'Author', label_name: 'Label' });
});
});
@@ -99,6 +118,29 @@ describe('FilteredSearchBarRoot', () => {
expect(wrapper.vm.sortDirectionTooltip).toBe('Sort direction: Descending');
});
});
+
+ describe('filteredRecentSearches', () => {
+ it('returns array of recent searches filtering out any string type (unsupported) items', async () => {
+ wrapper.setData({
+ recentSearches: [{ foo: 'bar' }, 'foo'],
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.filteredRecentSearches).toHaveLength(1);
+ expect(wrapper.vm.filteredRecentSearches[0]).toEqual({ foo: 'bar' });
+ });
+
+ it('returns undefined when recentSearchesStorageKey prop is not set on component', async () => {
+ wrapper.setProps({
+ recentSearchesStorageKey: '',
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.filteredRecentSearches).not.toBeDefined();
+ });
+ });
});
describe('watchers', () => {
@@ -139,6 +181,46 @@ describe('FilteredSearchBarRoot', () => {
});
});
+ describe('removeQuotesEnclosure', () => {
+ const mockFilters = [
+ {
+ type: 'author_username',
+ value: {
+ data: 'root',
+ operator: '=',
+ },
+ },
+ {
+ type: 'label_name',
+ value: {
+ data: '"Documentation Update"',
+ operator: '=',
+ },
+ },
+ 'foo',
+ ];
+
+ it('returns filter array with unescaped strings for values which have spaces', () => {
+ expect(wrapper.vm.removeQuotesEnclosure(mockFilters)).toEqual([
+ {
+ type: 'author_username',
+ value: {
+ data: 'root',
+ operator: '=',
+ },
+ },
+ {
+ type: 'label_name',
+ value: {
+ data: 'Documentation Update',
+ operator: '=',
+ },
+ },
+ 'foo',
+ ]);
+ });
+ });
+
describe('handleSortOptionClick', () => {
it('emits component event `onSort` with selected sort by value', () => {
wrapper.vm.handleSortOptionClick(mockSortOptions[1]);
@@ -172,9 +254,12 @@ describe('FilteredSearchBarRoot', () => {
describe('handleHistoryItemSelected', () => {
it('emits `onFilter` event with provided filters param', () => {
+ jest.spyOn(wrapper.vm, 'removeQuotesEnclosure');
+
wrapper.vm.handleHistoryItemSelected(mockHistoryItems[0]);
expect(wrapper.emitted('onFilter')[0]).toEqual([mockHistoryItems[0]]);
+ expect(wrapper.vm.removeQuotesEnclosure).toHaveBeenCalledWith(mockHistoryItems[0]);
});
});
@@ -233,10 +318,21 @@ describe('FilteredSearchBarRoot', () => {
});
});
+ it('calls `blurSearchInput` method to remove focus from filter input field', () => {
+ jest.spyOn(wrapper.vm, 'blurSearchInput');
+
+ wrapper.find(GlFilteredSearch).vm.$emit('submit', mockFilters);
+
+ expect(wrapper.vm.blurSearchInput).toHaveBeenCalled();
+ });
+
it('emits component event `onFilter` with provided filters param', () => {
+ jest.spyOn(wrapper.vm, 'removeQuotesEnclosure');
+
wrapper.vm.handleFilterSubmit(mockFilters);
expect(wrapper.emitted('onFilter')[0]).toEqual([mockFilters]);
+ expect(wrapper.vm.removeQuotesEnclosure).toHaveBeenCalledWith(mockFilters);
});
});
});
@@ -260,13 +356,28 @@ describe('FilteredSearchBarRoot', () => {
expect(glFilteredSearchEl.props('historyItems')).toEqual(mockHistoryItems);
});
+ it('renders search history items dropdown with formatting done using token symbols', async () => {
+ const wrapperFullMount = createComponent({ sortOptions: mockSortOptions, shallow: false });
+ wrapperFullMount.vm.recentSearchesStore.addRecentSearch(mockHistoryItems[0]);
+
+ await wrapperFullMount.vm.$nextTick();
+
+ const searchHistoryItemsEl = wrapperFullMount.findAll(
+ '.gl-search-box-by-click-menu .gl-search-box-by-click-history-item',
+ );
+
+ expect(searchHistoryItemsEl.at(0).text()).toBe('Author := @tobyLabel := ~Bug"duo"');
+
+ wrapperFullMount.destroy();
+ });
+
it('renders sort dropdown component', () => {
expect(wrapper.find(GlButtonGroup).exists()).toBe(true);
expect(wrapper.find(GlDropdown).exists()).toBe(true);
expect(wrapper.find(GlDropdown).props('text')).toBe(mockSortOptions[0].title);
});
- it('renders dropdown items', () => {
+ it('renders sort dropdown items', () => {
const dropdownItemsEl = wrapper.findAll(GlDropdownItem);
expect(dropdownItemsEl).toHaveLength(mockSortOptions.length);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
new file mode 100644
index 00000000000..a857f84adf1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_utils_spec.js
@@ -0,0 +1,19 @@
+import * as filteredSearchUtils from '~/vue_shared/components/filtered_search_bar/filtered_search_utils';
+
+describe('Filtered Search Utils', () => {
+ describe('stripQuotes', () => {
+ it.each`
+ inputValue | outputValue
+ ${'"Foo Bar"'} | ${'Foo Bar'}
+ ${"'Foo Bar'"} | ${'Foo Bar'}
+ ${'FooBar'} | ${'FooBar'}
+ ${"Foo'Bar"} | ${"Foo'Bar"}
+ ${'Foo"Bar'} | ${'Foo"Bar'}
+ `(
+ 'returns string $outputValue when called with string $inputValue',
+ ({ inputValue, outputValue }) => {
+ expect(filteredSearchUtils.stripQuotes(inputValue)).toBe(outputValue);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index 7e28c4e11e1..dcccb1f49b6 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -1,5 +1,8 @@
+import { mockLabels } from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
import Api from '~/api';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
+import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
+import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
export const mockAuthor1 = {
id: 1,
@@ -30,6 +33,28 @@ export const mockAuthor3 = {
export const mockAuthors = [mockAuthor1, mockAuthor2, mockAuthor3];
+export const mockRegularMilestone = {
+ id: 1,
+ name: '4.0',
+ title: '4.0',
+};
+
+export const mockEscapedMilestone = {
+ id: 3,
+ name: '5.0 RC1',
+ title: '5.0 RC1',
+};
+
+export const mockMilestones = [
+ {
+ id: 2,
+ name: '5.0',
+ title: '5.0',
+ },
+ mockRegularMilestone,
+ mockEscapedMilestone,
+];
+
export const mockAuthorToken = {
type: 'author_username',
icon: 'user',
@@ -42,7 +67,29 @@ export const mockAuthorToken = {
fetchAuthors: Api.projectUsers.bind(Api),
};
-export const mockAvailableTokens = [mockAuthorToken];
+export const mockLabelToken = {
+ type: 'label_name',
+ icon: 'labels',
+ title: 'Label',
+ unique: false,
+ symbol: '~',
+ token: LabelToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchLabels: () => Promise.resolve(mockLabels),
+};
+
+export const mockMilestoneToken = {
+ type: 'milestone_title',
+ icon: 'clock',
+ title: 'Milestone',
+ unique: true,
+ symbol: '%',
+ token: MilestoneToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchMilestones: () => Promise.resolve({ data: mockMilestones }),
+};
+
+export const mockAvailableTokens = [mockAuthorToken, mockLabelToken];
export const mockHistoryItems = [
[
@@ -53,6 +100,13 @@ export const mockHistoryItems = [
operator: '=',
},
},
+ {
+ type: 'label_name',
+ value: {
+ data: 'Bug',
+ operator: '=',
+ },
+ },
'duo',
],
[
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 45294096eda..160febf9d06 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -4,7 +4,7 @@ import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import createFlash from '~/flash';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import { mockAuthorToken, mockAuthors } from '../mock_data';
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
new file mode 100644
index 00000000000..0e60ee99327
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -0,0 +1,170 @@
+import { mount } from '@vue/test-utils';
+import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import {
+ mockRegularLabel,
+ mockLabels,
+} from 'jest/vue_shared/components/sidebar/labels_select_vue/mock_data';
+import axios from '~/lib/utils/axios_utils';
+
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
+
+import { mockLabelToken } from '../mock_data';
+
+jest.mock('~/flash');
+
+const createComponent = ({ config = mockLabelToken, value = { data: '' }, active = false } = {}) =>
+ mount(LabelToken, {
+ propsData: {
+ config,
+ value,
+ active,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ },
+ stubs: {
+ Portal: {
+ template: '<div><slot></slot></div>',
+ },
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+ },
+ });
+
+describe('LabelToken', () => {
+ let mock;
+ let wrapper;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ beforeEach(async () => {
+ // Label title with spaces is always enclosed in quotations by component.
+ wrapper = createComponent({ value: { data: `"${mockRegularLabel.title}"` } });
+
+ wrapper.setData({
+ labels: mockLabels,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ describe('currentValue', () => {
+ it('returns lowercase string for `value.data`', () => {
+ expect(wrapper.vm.currentValue).toBe('"foo label"');
+ });
+ });
+
+ describe('activeLabel', () => {
+ it('returns object for currently present `value.data`', () => {
+ expect(wrapper.vm.activeLabel).toEqual(mockRegularLabel);
+ });
+ });
+
+ describe('containerStyle', () => {
+ it('returns object containing `backgroundColor` and `color` properties based on `activeLabel` value', () => {
+ expect(wrapper.vm.containerStyle).toEqual({
+ backgroundColor: mockRegularLabel.color,
+ color: mockRegularLabel.textColor,
+ });
+ });
+
+ it('returns empty object when `activeLabel` is not set', async () => {
+ wrapper.setData({
+ labels: [],
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.containerStyle).toEqual({});
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('fetchLabelBySearchTerm', () => {
+ it('calls `config.fetchLabels` with provided searchTerm param', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchLabels');
+
+ wrapper.vm.fetchLabelBySearchTerm('foo');
+
+ expect(wrapper.vm.config.fetchLabels).toHaveBeenCalledWith('foo');
+ });
+
+      it('sets response to `labels` when request is successful', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchLabels').mockResolvedValue(mockLabels);
+
+ wrapper.vm.fetchLabelBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(wrapper.vm.labels).toEqual(mockLabels);
+ });
+ });
+
+ it('calls `createFlash` with flash error message when request fails', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
+
+ wrapper.vm.fetchLabelBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(createFlash).toHaveBeenCalledWith('There was a problem fetching labels.');
+ });
+ });
+
+ it('sets `loading` to false when request completes', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchLabels').mockRejectedValue({});
+
+ wrapper.vm.fetchLabelBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(wrapper.vm.loading).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('template', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ value: { data: `"${mockRegularLabel.title}"` } });
+
+ wrapper.setData({
+ labels: mockLabels,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders gl-filtered-search-token component', () => {
+ expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ });
+
+ it('renders token item when value is selected', () => {
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // Label, =, "Foo Label"
+ expect(tokenSegments.at(2).text()).toBe(`~${mockRegularLabel.title}`); // "Foo Label"
+ expect(
+ tokenSegments
+ .at(2)
+ .find('.gl-token')
+ .attributes('style'),
+ ).toBe('background-color: rgb(186, 218, 85); color: rgb(255, 255, 255);');
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
new file mode 100644
index 00000000000..de893bf44c8
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -0,0 +1,152 @@
+import { mount } from '@vue/test-utils';
+import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import axios from '~/lib/utils/axios_utils';
+
+import createFlash from '~/flash';
+import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
+
+import {
+ mockMilestoneToken,
+ mockMilestones,
+ mockRegularMilestone,
+ mockEscapedMilestone,
+} from '../mock_data';
+
+jest.mock('~/flash');
+
+const createComponent = ({
+ config = mockMilestoneToken,
+ value = { data: '' },
+ active = false,
+} = {}) =>
+ mount(MilestoneToken, {
+ propsData: {
+ config,
+ value,
+ active,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ },
+ stubs: {
+ Portal: {
+ template: '<div><slot></slot></div>',
+ },
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+ },
+ });
+
+describe('MilestoneToken', () => {
+ let mock;
+ let wrapper;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ beforeEach(async () => {
+ // Milestone title with spaces is always enclosed in quotations by component.
+ wrapper = createComponent({ value: { data: `"${mockEscapedMilestone.title}"` } });
+
+ wrapper.setData({
+ milestones: mockMilestones,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ describe('currentValue', () => {
+ it('returns lowercase string for `value.data`', () => {
+ expect(wrapper.vm.currentValue).toBe('"5.0 rc1"');
+ });
+ });
+
+ describe('activeMilestone', () => {
+ it('returns object for currently present `value.data`', () => {
+ expect(wrapper.vm.activeMilestone).toEqual(mockEscapedMilestone);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('fetchMilestoneBySearchTerm', () => {
+ it('calls `config.fetchMilestones` with provided searchTerm param', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchMilestones');
+
+ wrapper.vm.fetchMilestoneBySearchTerm('foo');
+
+ expect(wrapper.vm.config.fetchMilestones).toHaveBeenCalledWith('foo');
+ });
+
+ it('sets response to `milestones` when request is successful', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchMilestones').mockResolvedValue({
+ data: mockMilestones,
+ });
+
+ wrapper.vm.fetchMilestoneBySearchTerm();
+
+ return waitForPromises().then(() => {
+ expect(wrapper.vm.milestones).toEqual(mockMilestones);
+ });
+ });
+
+ it('calls `createFlash` with flash error message when request fails', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchMilestones').mockRejectedValue({});
+
+ wrapper.vm.fetchMilestoneBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(createFlash).toHaveBeenCalledWith('There was a problem fetching milestones.');
+ });
+ });
+
+ it('sets `loading` to false when request completes', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchMilestones').mockRejectedValue({});
+
+ wrapper.vm.fetchMilestoneBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(wrapper.vm.loading).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('template', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ value: { data: `"${mockRegularMilestone.title}"` } });
+
+ wrapper.setData({
+ milestones: mockMilestones,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders gl-filtered-search-token component', () => {
+ expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ });
+
+ it('renders token item when value is selected', () => {
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // Milestone, =, '%"4.0"'
+      expect(tokenSegments.at(2).text()).toBe(`%"${mockRegularMilestone.title}"`); // %"4.0"
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js b/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js
index 30e16bd12da..361b162b6a0 100644
--- a/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js
+++ b/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js
@@ -1,5 +1,5 @@
-import FormFooterActions from '~/vue_shared/components/form/form_footer_actions.vue';
import { shallowMount } from '@vue/test-utils';
+import FormFooterActions from '~/vue_shared/components/form/form_footer_actions.vue';
describe('Form Footer Actions', () => {
let wrapper;
diff --git a/spec/frontend/vue_shared/components/form/title_spec.js b/spec/frontend/vue_shared/components/form/title_spec.js
index 38ef1bb3aa7..452f3723e76 100644
--- a/spec/frontend/vue_shared/components/form/title_spec.js
+++ b/spec/frontend/vue_shared/components/form/title_spec.js
@@ -1,5 +1,5 @@
-import TitleField from '~/vue_shared/components/form/title.vue';
import { shallowMount } from '@vue/test-utils';
+import TitleField from '~/vue_shared/components/form/title.vue';
describe('Title edit field', () => {
let wrapper;
diff --git a/spec/frontend/vue_shared/components/header_ci_component_spec.js b/spec/frontend/vue_shared/components/header_ci_component_spec.js
index 216563165d6..5233a64ce5e 100644
--- a/spec/frontend/vue_shared/components/header_ci_component_spec.js
+++ b/spec/frontend/vue_shared/components/header_ci_component_spec.js
@@ -35,7 +35,7 @@ describe('Header CI Component', () => {
vm.$destroy();
});
- const findActionButtons = () => vm.$el.querySelector('.header-action-buttons');
+ const findActionButtons = () => vm.$el.querySelector('[data-testid="headerButtons"]');
describe('render', () => {
beforeEach(() => {
diff --git a/spec/frontend/vue_shared/components/icon_spec.js b/spec/frontend/vue_shared/components/icon_spec.js
index a448953cc8e..16728e1705a 100644
--- a/spec/frontend/vue_shared/components/icon_spec.js
+++ b/spec/frontend/vue_shared/components/icon_spec.js
@@ -1,8 +1,8 @@
import Vue from 'vue';
import { mount } from '@vue/test-utils';
import mountComponent from 'helpers/vue_mount_component_helper';
-import Icon from '~/vue_shared/components/icon.vue';
import iconsPath from '@gitlab/svgs/dist/icons.svg';
+import Icon from '~/vue_shared/components/icon.vue';
jest.mock('@gitlab/svgs/dist/icons.svg', () => 'testing');
diff --git a/spec/frontend/vue_shared/components/identicon_spec.js b/spec/frontend/vue_shared/components/identicon_spec.js
index 53a55dcd6bd..24fc3713e2b 100644
--- a/spec/frontend/vue_shared/components/identicon_spec.js
+++ b/spec/frontend/vue_shared/components/identicon_spec.js
@@ -25,7 +25,7 @@ describe('Identicon', () => {
});
describe('entity id is a number', () => {
- beforeEach(createComponent);
+ beforeEach(() => createComponent());
it('matches snapshot', () => {
expect(wrapper.element).toMatchSnapshot();
diff --git a/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js b/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js
new file mode 100644
index 00000000000..2f910a10bc6
--- /dev/null
+++ b/spec/frontend/vue_shared/components/issuable/issuable_header_warnings_spec.js
@@ -0,0 +1,73 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import IssuableHeaderWarnings from '~/vue_shared/components/issuable/issuable_header_warnings.vue';
+import createIssueStore from '~/notes/stores';
+import { createStore as createMrStore } from '~/mr_notes/stores';
+
+const ISSUABLE_TYPE_ISSUE = 'issue';
+const ISSUABLE_TYPE_MR = 'merge request';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('IssuableHeaderWarnings', () => {
+ let wrapper;
+ let store;
+
+ const findConfidentialIcon = () => wrapper.find('[data-testid="confidential"]');
+ const findLockedIcon = () => wrapper.find('[data-testid="locked"]');
+
+ const renderTestMessage = renders => (renders ? 'renders' : 'does not render');
+
+ const setLock = locked => {
+ store.getters.getNoteableData.discussion_locked = locked;
+ };
+
+ const setConfidential = confidential => {
+ store.getters.getNoteableData.confidential = confidential;
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMount(IssuableHeaderWarnings, { store, localVue });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ store = null;
+ });
+
+ describe.each`
+ issuableType
+ ${ISSUABLE_TYPE_ISSUE} | ${ISSUABLE_TYPE_MR}
+ `(`when issuableType=$issuableType`, ({ issuableType }) => {
+ beforeEach(() => {
+ store = issuableType === ISSUABLE_TYPE_ISSUE ? createIssueStore() : createMrStore();
+ createComponent();
+ });
+
+ describe.each`
+ lockStatus | confidentialStatus
+ ${true} | ${true}
+ ${true} | ${false}
+ ${false} | ${true}
+ ${false} | ${false}
+ `(
+ `when locked=$lockStatus and confidential=$confidentialStatus`,
+ ({ lockStatus, confidentialStatus }) => {
+ beforeEach(() => {
+ setLock(lockStatus);
+ setConfidential(confidentialStatus);
+ });
+
+ it(`${renderTestMessage(lockStatus)} the locked icon`, () => {
+ expect(findLockedIcon().exists()).toBe(lockStatus);
+ });
+
+ it(`${renderTestMessage(confidentialStatus)} the confidential icon`, () => {
+ expect(findConfidentialIcon().exists()).toBe(confidentialStatus);
+ });
+ },
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
index 548d4476c0f..192e33d8b00 100644
--- a/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_assignees_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
+import { mockAssigneesList } from 'jest/boards/mock_data';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
import IssueAssignees from '~/vue_shared/components/issue/issue_assignees.vue';
-import { mockAssigneesList } from 'jest/boards/mock_data';
const TEST_CSS_CLASSES = 'test-classes';
const TEST_MAX_VISIBLE = 4;
@@ -21,6 +21,11 @@ describe('IssueAssigneesComponent', () => {
vm = wrapper.vm;
};
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
const findTooltipText = () => wrapper.find('.js-assignee-tooltip').text();
const findAvatars = () => wrapper.findAll(UserAvatarLink);
const findOverflowCounter = () => wrapper.find('.avatar-counter');
@@ -123,6 +128,22 @@ describe('IssueAssigneesComponent', () => {
it('renders assignee @username', () => {
expect(findTooltipText()).toContain('@monserrate.gleichner');
});
+
+    it('does not render `@` when username is not available', () => {
+ const userName = 'User without username';
+ factory({
+ assignees: [
+ {
+ name: userName,
+ },
+ ],
+ });
+
+ const tooltipText = findTooltipText();
+
+ expect(tooltipText).toContain(userName);
+ expect(tooltipText).not.toContain('@');
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
index 69d8c1a5918..b72f78c4f60 100644
--- a/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
+++ b/spec/frontend/vue_shared/components/issue/issue_milestone_spec.js
@@ -1,11 +1,10 @@
import Vue from 'vue';
import { shallowMount } from '@vue/test-utils';
+import { mockMilestone } from 'jest/boards/mock_data';
import IssueMilestone from '~/vue_shared/components/issue/issue_milestone.vue';
import Icon from '~/vue_shared/components/icon.vue';
-import { mockMilestone } from 'jest/boards/mock_data';
-
const createComponent = (milestone = mockMilestone) => {
const Component = Vue.extend(IssueMilestone);
diff --git a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
index fe9a5156539..fb9487d0bf8 100644
--- a/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
+++ b/spec/frontend/vue_shared/components/issue/related_issuable_item_spec.js
@@ -1,9 +1,9 @@
-import Vue from 'vue';
import { mount } from '@vue/test-utils';
+import { TEST_HOST } from 'jest/helpers/test_constants';
import { formatDate } from '~/lib/utils/datetime_utility';
import RelatedIssuableItem from '~/vue_shared/components/issue/related_issuable_item.vue';
+import IssueDueDate from '~/boards/components/issue_due_date.vue';
import { defaultAssignees, defaultMilestone } from './related_issuable_mock_data';
-import { TEST_HOST } from 'jest/helpers/test_constants';
describe('RelatedIssuableItem', () => {
let wrapper;
@@ -71,85 +71,65 @@ describe('RelatedIssuableItem', () => {
});
describe('token state', () => {
- let tokenState;
+ const tokenState = () => wrapper.find({ ref: 'iconElementXL' });
- beforeEach(done => {
+ beforeEach(() => {
wrapper.setProps({ state: 'opened' });
-
- Vue.nextTick(() => {
- tokenState = wrapper.find('.issue-token-state-icon-open');
-
- done();
- });
});
it('renders if hasState', () => {
- expect(tokenState.exists()).toBe(true);
+ expect(tokenState().exists()).toBe(true);
});
it('renders state title', () => {
- const stateTitle = tokenState.attributes('title');
+ const stateTitle = tokenState().attributes('title');
const formattedCreateDate = formatDate(props.createdAt);
expect(stateTitle).toContain('<span class="bold">Opened</span>');
-
expect(stateTitle).toContain(`<span class="text-tertiary">${formattedCreateDate}</span>`);
});
it('renders aria label', () => {
- expect(tokenState.attributes('aria-label')).toEqual('opened');
+ expect(tokenState().attributes('aria-label')).toEqual('opened');
});
it('renders open icon when open state', () => {
- expect(tokenState.classes('issue-token-state-icon-open')).toBe(true);
+ expect(tokenState().classes('issue-token-state-icon-open')).toBe(true);
});
- it('renders close icon when close state', done => {
+ it('renders close icon when close state', async () => {
wrapper.setProps({
state: 'closed',
closedAt: '2018-12-01T00:00:00.00Z',
});
+ await wrapper.vm.$nextTick();
- Vue.nextTick(() => {
- expect(tokenState.classes('issue-token-state-icon-closed')).toBe(true);
-
- done();
- });
+ expect(tokenState().classes('issue-token-state-icon-closed')).toBe(true);
});
});
describe('token metadata', () => {
- let tokenMetadata;
-
- beforeEach(done => {
- Vue.nextTick(() => {
- tokenMetadata = wrapper.find('.item-meta');
-
- done();
- });
- });
+ const tokenMetadata = () => wrapper.find('.item-meta');
it('renders item path and ID', () => {
- const pathAndID = tokenMetadata.find('.item-path-id').text();
+ const pathAndID = tokenMetadata()
+ .find('.item-path-id')
+ .text();
expect(pathAndID).toContain('gitlab-org/gitlab-test');
expect(pathAndID).toContain('#1');
});
it('renders milestone icon and name', () => {
- const milestoneIcon = tokenMetadata.find('.item-milestone svg use');
- const milestoneTitle = tokenMetadata.find('.item-milestone .milestone-title');
+ const milestoneIcon = tokenMetadata().find('.item-milestone svg use');
+ const milestoneTitle = tokenMetadata().find('.item-milestone .milestone-title');
expect(milestoneIcon.attributes('href')).toContain('clock');
expect(milestoneTitle.text()).toContain('Milestone title');
});
- it('renders due date component', () => {
- expect(tokenMetadata.find('.js-due-date-slot').exists()).toBe(true);
- });
-
- it('renders weight component', () => {
- expect(tokenMetadata.find('.js-weight-slot').exists()).toBe(true);
+ it('renders due date component with correct due date', () => {
+ expect(wrapper.find(IssueDueDate).props('date')).toBe(props.dueDate);
});
});
@@ -163,40 +143,30 @@ describe('RelatedIssuableItem', () => {
});
describe('remove button', () => {
- let removeBtn;
+ const removeButton = () => wrapper.find({ ref: 'removeButton' });
- beforeEach(done => {
+ beforeEach(() => {
wrapper.setProps({ canRemove: true });
- Vue.nextTick(() => {
- removeBtn = wrapper.find({ ref: 'removeButton' });
-
- done();
- });
});
it('renders if canRemove', () => {
- expect(removeBtn.exists()).toBe(true);
+ expect(removeButton().exists()).toBe(true);
});
- it('renders disabled button when removeDisabled', done => {
- wrapper.vm.removeDisabled = true;
-
- Vue.nextTick(() => {
- expect(removeBtn.attributes('disabled')).toEqual('disabled');
+ it('renders disabled button when removeDisabled', async () => {
+ wrapper.setData({ removeDisabled: true });
+ await wrapper.vm.$nextTick();
- done();
- });
+ expect(removeButton().attributes('disabled')).toEqual('disabled');
});
- it('triggers onRemoveRequest when clicked', () => {
- removeBtn.trigger('click');
+ it('triggers onRemoveRequest when clicked', async () => {
+ removeButton().trigger('click');
+ await wrapper.vm.$nextTick();
+ const { relatedIssueRemoveRequest } = wrapper.emitted();
- return wrapper.vm.$nextTick().then(() => {
- const { relatedIssueRemoveRequest } = wrapper.emitted();
-
- expect(relatedIssueRemoveRequest.length).toBe(1);
- expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
- });
+ expect(relatedIssueRemoveRequest.length).toBe(1);
+ expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
});
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index 74be5f8230e..3da0a35f05a 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -1,8 +1,8 @@
import { mount } from '@vue/test-utils';
-import fieldComponent from '~/vue_shared/components/markdown/field.vue';
import { TEST_HOST, FIXTURES_PATH } from 'spec/test_constants';
import AxiosMockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
+import fieldComponent from '~/vue_shared/components/markdown/field.vue';
import axios from '~/lib/utils/axios_utils';
const markdownPreviewPath = `${TEST_HOST}/preview`;
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
index 78f27c9948b..16f60b5ff21 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/editor_service_spec.js
@@ -5,10 +5,13 @@ import {
registerHTMLToMarkdownRenderer,
addImage,
getMarkdown,
+ getEditorOptions,
} from '~/vue_shared/components/rich_content_editor/services/editor_service';
import buildHTMLToMarkdownRenderer from '~/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer';
+import buildCustomRenderer from '~/vue_shared/components/rich_content_editor/services/build_custom_renderer';
jest.mock('~/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer');
+jest.mock('~/vue_shared/components/rich_content_editor/services/build_custom_renderer');
describe('Editor Service', () => {
let mockInstance;
@@ -120,4 +123,25 @@ describe('Editor Service', () => {
expect(mockInstance.toMarkOptions.renderer).toBe(extendedRenderer);
});
});
+
+ describe('getEditorOptions', () => {
+ const externalOptions = {
+ customRenderers: {},
+ };
+ const renderer = {};
+
+ beforeEach(() => {
+ buildCustomRenderer.mockReturnValueOnce(renderer);
+ });
+
+ it('generates a configuration object with a custom HTML renderer and toolbarItems', () => {
+ expect(getEditorOptions()).toHaveProp('customHTMLRenderer', renderer);
+ expect(getEditorOptions()).toHaveProp('toolbarItems');
+ });
+
+ it('passes external renderers to the buildCustomRenderers function', () => {
+ getEditorOptions(externalOptions);
+ expect(buildCustomRenderer).toHaveBeenCalledWith(externalOptions.customRenderers);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_integration_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_integration_spec.js
new file mode 100644
index 00000000000..b9b93b274d2
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_integration_spec.js
@@ -0,0 +1,69 @@
+import Editor from '@toast-ui/editor';
+import { registerHTMLToMarkdownRenderer } from '~/vue_shared/components/rich_content_editor/services/editor_service';
+import buildMarkdownToHTMLRenderer from '~/vue_shared/components/rich_content_editor/services/build_custom_renderer';
+
+describe('vue_shared/components/rich_content_editor', () => {
+ let editor;
+
+ const buildEditor = () => {
+ editor = new Editor({
+ el: document.body,
+ customHTMLRenderer: buildMarkdownToHTMLRenderer(),
+ });
+
+ registerHTMLToMarkdownRenderer(editor);
+ };
+
+ beforeEach(() => {
+ buildEditor();
+ });
+
+ describe('HTML to Markdown', () => {
+ it('uses "-" character list marker in unordered lists', () => {
+ editor.setHtml('<ul><li>List item 1</li><li>List item 2</li></ul>');
+
+ const markdown = editor.getMarkdown();
+
+ expect(markdown).toBe('- List item 1\n- List item 2');
+ });
+
+ it('does not increment the list marker in ordered lists', () => {
+ editor.setHtml('<ol><li>List item 1</li><li>List item 2</li></ol>');
+
+ const markdown = editor.getMarkdown();
+
+ expect(markdown).toBe('1. List item 1\n1. List item 2');
+ });
+
+ it('indents lists using four spaces', () => {
+ editor.setHtml('<ul><li>List item 1</li><ul><li>List item 2</li></ul></ul>');
+
+ const markdown = editor.getMarkdown();
+
+      expect(markdown).toBe('- List item 1\n    - List item 2');
+ });
+
+    it('uses ** for strong and _ for emphasis text', () => {
+ editor.setHtml('<strong>strong text</strong><i>emphasis text</i>');
+
+ const markdown = editor.getMarkdown();
+
+ expect(markdown).toBe('**strong text**_emphasis text_');
+ });
+ });
+
+ describe('Markdown to HTML', () => {
+ it.each`
+ input | output
+ ${'markdown with _emphasized\ntext_'} | ${'<p>markdown with <em>emphasized text</em></p>\n'}
+ ${'markdown with **strong\ntext**'} | ${'<p>markdown with <strong>strong text</strong></p>\n'}
+ `(
+      'does not transform softbreaks inside emphasis (_) and strong (**) nodes into <br/> tags',
+ ({ input, output }) => {
+ editor.setMarkdown(input);
+
+ expect(editor.getHtml()).toBe(output);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
index b6ff6aa767c..3d54db7fe5c 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/rich_content_editor_spec.js
@@ -2,7 +2,6 @@ import { shallowMount } from '@vue/test-utils';
import RichContentEditor from '~/vue_shared/components/rich_content_editor/rich_content_editor.vue';
import AddImageModal from '~/vue_shared/components/rich_content_editor/modals/add_image/add_image_modal.vue';
import {
- EDITOR_OPTIONS,
EDITOR_TYPES,
EDITOR_HEIGHT,
EDITOR_PREVIEW_STYLE,
@@ -14,6 +13,7 @@ import {
removeCustomEventListener,
addImage,
registerHTMLToMarkdownRenderer,
+ getEditorOptions,
} from '~/vue_shared/components/rich_content_editor/services/editor_service';
jest.mock('~/vue_shared/components/rich_content_editor/services/editor_service', () => ({
@@ -22,6 +22,7 @@ jest.mock('~/vue_shared/components/rich_content_editor/services/editor_service',
removeCustomEventListener: jest.fn(),
addImage: jest.fn(),
registerHTMLToMarkdownRenderer: jest.fn(),
+ getEditorOptions: jest.fn(),
}));
describe('Rich Content Editor', () => {
@@ -32,13 +33,25 @@ describe('Rich Content Editor', () => {
const findEditor = () => wrapper.find({ ref: 'editor' });
const findAddImageModal = () => wrapper.find(AddImageModal);
- beforeEach(() => {
+ const buildWrapper = () => {
wrapper = shallowMount(RichContentEditor, {
propsData: { content, imageRoot },
});
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
});
describe('when content is loaded', () => {
+ const editorOptions = {};
+
+ beforeEach(() => {
+ getEditorOptions.mockReturnValueOnce(editorOptions);
+ buildWrapper();
+ });
+
it('renders an editor', () => {
expect(findEditor().exists()).toBe(true);
});
@@ -47,8 +60,8 @@ describe('Rich Content Editor', () => {
expect(findEditor().props().initialValue).toBe(content);
});
- it('provides the correct editor options', () => {
- expect(findEditor().props().options).toEqual(EDITOR_OPTIONS);
+ it('provides options generated by the getEditorOptions service', () => {
+ expect(findEditor().props().options).toBe(editorOptions);
});
it('has the correct preview style', () => {
@@ -65,6 +78,10 @@ describe('Rich Content Editor', () => {
});
describe('when content is changed', () => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
it('emits an input event with the changed content', () => {
const changedMarkdown = '## Changed Markdown';
const getMarkdownMock = jest.fn().mockReturnValueOnce(changedMarkdown);
@@ -77,6 +94,10 @@ describe('Rich Content Editor', () => {
});
describe('when content is reset', () => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
it('should reset the content via setMarkdown', () => {
const newContent = 'Just the body content excluding the front matter for example';
const mockInstance = { invoke: jest.fn() };
@@ -89,35 +110,33 @@ describe('Rich Content Editor', () => {
});
describe('when editor is loaded', () => {
- let mockEditorApi;
-
beforeEach(() => {
- mockEditorApi = { eventManager: { addEventType: jest.fn(), listen: jest.fn() } };
- findEditor().vm.$emit('load', mockEditorApi);
+ buildWrapper();
});
it('adds the CUSTOM_EVENTS.openAddImageModal custom event listener', () => {
expect(addCustomEventListener).toHaveBeenCalledWith(
- mockEditorApi,
+ wrapper.vm.editorApi,
CUSTOM_EVENTS.openAddImageModal,
wrapper.vm.onOpenAddImageModal,
);
});
it('registers HTML to markdown renderer', () => {
- expect(registerHTMLToMarkdownRenderer).toHaveBeenCalledWith(mockEditorApi);
+ expect(registerHTMLToMarkdownRenderer).toHaveBeenCalledWith(wrapper.vm.editorApi);
});
});
describe('when editor is destroyed', () => {
- it('removes the CUSTOM_EVENTS.openAddImageModal custom event listener', () => {
- const mockEditorApi = { eventManager: { removeEventHandler: jest.fn() } };
+ beforeEach(() => {
+ buildWrapper();
+ });
- wrapper.vm.editorApi = mockEditorApi;
+ it('removes the CUSTOM_EVENTS.openAddImageModal custom event listener', () => {
wrapper.vm.$destroy();
expect(removeCustomEventListener).toHaveBeenCalledWith(
- mockEditorApi,
+ wrapper.vm.editorApi,
CUSTOM_EVENTS.openAddImageModal,
wrapper.vm.onOpenAddImageModal,
);
@@ -125,6 +144,10 @@ describe('Rich Content Editor', () => {
});
describe('add image modal', () => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
it('renders an addImageModal component', () => {
expect(findAddImageModal().exists()).toBe(true);
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
index 0e8610a22f5..a90d3528d60 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/build_html_to_markdown_renderer_spec.js
@@ -47,4 +47,87 @@ describe('HTMLToMarkdownRenderer', () => {
expect(baseRenderer.convert).toHaveBeenCalledWith(NODE, list);
});
});
+
+ describe('UL LI visitor', () => {
+ it.each`
+ listItem | unorderedListBulletChar | result | bulletChar
+ ${'* list item'} | ${undefined} | ${'- list item'} | ${'default'}
+ ${' - list item'} | ${'*'} | ${' * list item'} | ${'*'}
+ ${' * list item'} | ${'-'} | ${' - list item'} | ${'-'}
+ `(
+ 'uses $bulletChar bullet char in unordered list items when $unorderedListBulletChar is set in config',
+ ({ listItem, unorderedListBulletChar, result }) => {
+ htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
+ unorderedListBulletChar,
+ });
+ baseRenderer.convert.mockReturnValueOnce(listItem);
+
+ expect(htmlToMarkdownRenderer['UL LI'](NODE, listItem)).toBe(result);
+ expect(baseRenderer.convert).toHaveBeenCalledWith(NODE, listItem);
+ },
+ );
+ });
+
+ describe('OL LI visitor', () => {
+ it.each`
+ listItem | result | incrementListMarker | action
+      ${'2. list item'} | ${'1. list item'} | ${false} | ${'does not increment'}
+      ${' 3. list item'} | ${' 1. list item'} | ${false} | ${'does not increment'}
+      ${' 123. list item'} | ${' 1. list item'} | ${false} | ${'does not increment'}
+      ${'3. list item'} | ${'3. list item'} | ${true} | ${'increments'}
+ `(
+      '$action a list item counter when incrementListMarker is $incrementListMarker',
+ ({ listItem, result, incrementListMarker }) => {
+ const subContent = null;
+
+ htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
+ incrementListMarker,
+ });
+ baseRenderer.convert.mockReturnValueOnce(listItem);
+
+ expect(htmlToMarkdownRenderer['OL LI'](NODE, subContent)).toBe(result);
+ expect(baseRenderer.convert).toHaveBeenCalledWith(NODE, subContent);
+ },
+ );
+ });
+
+ describe('STRONG, B visitor', () => {
+ it.each`
+ input | strongCharacter | result
+ ${'**strong text**'} | ${'_'} | ${'__strong text__'}
+ ${'__strong text__'} | ${'*'} | ${'**strong text**'}
+ `(
+ 'converts $input to $result when strong character is $strongCharacter',
+ ({ input, strongCharacter, result }) => {
+ htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
+ strong: strongCharacter,
+ });
+
+ baseRenderer.convert.mockReturnValueOnce(input);
+
+ expect(htmlToMarkdownRenderer['STRONG, B'](NODE, input)).toBe(result);
+ expect(baseRenderer.convert).toHaveBeenCalledWith(NODE, input);
+ },
+ );
+ });
+
+ describe('EM, I visitor', () => {
+ it.each`
+ input | emphasisCharacter | result
+ ${'*strong text*'} | ${'_'} | ${'_strong text_'}
+ ${'_strong text_'} | ${'*'} | ${'*strong text*'}
+ `(
+ 'converts $input to $result when emphasis character is $emphasisCharacter',
+ ({ input, emphasisCharacter, result }) => {
+ htmlToMarkdownRenderer = buildHTMLToMarkdownRenderer(baseRenderer, {
+ emphasis: emphasisCharacter,
+ });
+
+ baseRenderer.convert.mockReturnValueOnce(input);
+
+ expect(htmlToMarkdownRenderer['EM, I'](NODE, input)).toBe(result);
+ expect(baseRenderer.convert).toHaveBeenCalledWith(NODE, input);
+ },
+ );
+ });
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token_spec.js
index 18dff0a39bb..7a7e3055520 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token_spec.js
@@ -3,7 +3,7 @@ import {
buildUneditableOpenTokens,
buildUneditableCloseToken,
buildUneditableCloseTokens,
- buildUneditableTokens,
+ buildUneditableBlockTokens,
buildUneditableInlineTokens,
buildUneditableHtmlAsTextTokens,
} from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
@@ -51,9 +51,9 @@ describe('Build Uneditable Token renderer helper', () => {
});
});
- describe('buildUneditableTokens', () => {
+ describe('buildUneditableBlockTokens', () => {
it('returns a 3-item array of tokens with the originToken wrapped in the middle of block tokens', () => {
- const result = buildUneditableTokens(originToken);
+ const result = buildUneditableBlockTokens(originToken);
expect(result).toHaveLength(3);
expect(result).toStrictEqual(uneditableTokens);
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_embedded_ruby_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_embedded_ruby_spec.js
index b723ee8c8a0..0c59d9f569b 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_embedded_ruby_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_embedded_ruby_spec.js
@@ -1,5 +1,5 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_embedded_ruby_text';
-import { buildUneditableTokens } from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
+import { renderUneditableLeaf } from '~/vue_shared/components/rich_content_editor/services/renderers/render_utils';
import { buildMockTextNode, normalTextNode } from './mock_data';
@@ -17,14 +17,8 @@ describe('Render Embedded Ruby Text renderer', () => {
});
describe('render', () => {
- const origin = jest.fn();
-
- it('should return uneditable tokens', () => {
- const context = { origin };
-
- expect(renderer.render(embeddedRubyTextNode, context)).toStrictEqual(
- buildUneditableTokens(origin()),
- );
+ it('should delegate rendering to the renderUneditableLeaf util', () => {
+ expect(renderer.render).toBe(renderUneditableLeaf);
});
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
index 320589e4de3..f4a06b91a10 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph_spec.js
@@ -1,8 +1,5 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_identifier_paragraph';
-import {
- buildUneditableOpenTokens,
- buildUneditableCloseToken,
-} from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
+import { renderUneditableBranch } from '~/vue_shared/components/rich_content_editor/services/renderers/render_utils';
import { buildMockTextNode } from './mock_data';
@@ -40,26 +37,8 @@ describe('Render Identifier Paragraph renderer', () => {
});
describe('render', () => {
- let origin;
-
- beforeEach(() => {
- origin = jest.fn();
- });
-
- it('should return uneditable open tokens when entering', () => {
- const context = { entering: true, origin };
-
- expect(renderer.render(identifierParagraphNode, context)).toStrictEqual(
- buildUneditableOpenTokens(origin()),
- );
- });
-
- it('should return an uneditable close tokens when exiting', () => {
- const context = { entering: false, origin };
-
- expect(renderer.render(identifierParagraphNode, context)).toStrictEqual(
- buildUneditableCloseToken(origin()),
- );
+ it('should delegate rendering to the renderUneditableBranch util', () => {
+ expect(renderer.render).toBe(renderUneditableBranch);
});
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list_spec.js
index e60bf1c8c92..7d427108ba6 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list_spec.js
@@ -1,8 +1,5 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list';
-import {
- buildUneditableOpenTokens,
- buildUneditableCloseToken,
-} from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
+import { renderUneditableBranch } from '~/vue_shared/components/rich_content_editor/services/renderers/render_utils';
import { buildMockTextNode } from './mock_data';
@@ -34,22 +31,8 @@ describe('Render Kramdown List renderer', () => {
});
describe('render', () => {
- const origin = jest.fn();
-
- it('should return uneditable open tokens when entering', () => {
- const context = { entering: true, origin };
-
- expect(renderer.render(kramdownListNode, context)).toStrictEqual(
- buildUneditableOpenTokens(origin()),
- );
- });
-
- it('should return an uneditable close tokens when exiting', () => {
- const context = { entering: false, origin };
-
- expect(renderer.render(kramdownListNode, context)).toStrictEqual(
- buildUneditableCloseToken(origin()),
- );
+ it('should delegate rendering to the renderUneditableBranch util', () => {
+ expect(renderer.render).toBe(renderUneditableBranch);
});
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text_spec.js
index 97ff9794e69..1d2d152ffc3 100644
--- a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text_spec.js
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text_spec.js
@@ -1,5 +1,5 @@
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text';
-import { buildUneditableTokens } from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
+import { renderUneditableLeaf } from '~/vue_shared/components/rich_content_editor/services/renderers/render_utils';
import { buildMockTextNode, normalTextNode } from './mock_data';
@@ -17,14 +17,8 @@ describe('Render Kramdown Text renderer', () => {
});
describe('render', () => {
- const origin = jest.fn();
-
- it('should return uneditable tokens', () => {
- const context = { origin };
-
- expect(renderer.render(kramdownTextNode, context)).toStrictEqual(
- buildUneditableTokens(origin()),
- );
+ it('should delegate rendering to the renderUneditableLeaf util', () => {
+ expect(renderer.render).toBe(renderUneditableLeaf);
});
});
});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_softbreak_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_softbreak_spec.js
new file mode 100644
index 00000000000..3c3d2354cb9
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_softbreak_spec.js
@@ -0,0 +1,23 @@
+import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_softbreak';
+
+describe('Render softbreak renderer', () => {
+ describe('canRender', () => {
+ it.each`
+ node | parentType | result
+ ${{ parent: { type: 'emph' } }} | ${'emph'} | ${true}
+ ${{ parent: { type: 'strong' } }} | ${'strong'} | ${true}
+ ${{ parent: { type: 'paragraph' } }} | ${'paragraph'} | ${false}
+    `('returns $result when node parent type is $parentType', ({ node, result }) => {
+ expect(renderer.canRender(node)).toBe(result);
+ });
+ });
+
+ describe('render', () => {
+ it('returns text node with a break line', () => {
+ expect(renderer.render()).toEqual({
+ type: 'text',
+ content: ' ',
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_utils_spec.js b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_utils_spec.js
new file mode 100644
index 00000000000..92435b3e4e3
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_content_editor/services/renderers/render_utils_spec.js
@@ -0,0 +1,44 @@
+import {
+ renderUneditableLeaf,
+ renderUneditableBranch,
+} from '~/vue_shared/components/rich_content_editor/services/renderers/render_utils';
+
+import {
+ buildUneditableBlockTokens,
+ buildUneditableOpenTokens,
+} from '~/vue_shared/components/rich_content_editor/services/renderers/build_uneditable_token';
+
+import { originToken, uneditableCloseToken } from './mock_data';
+
+describe('Render utils', () => {
+ describe('renderUneditableLeaf', () => {
+ it('should return uneditable block tokens around an origin token', () => {
+ const context = { origin: jest.fn().mockReturnValueOnce(originToken) };
+ const result = renderUneditableLeaf({}, context);
+
+ expect(result).toStrictEqual(buildUneditableBlockTokens(originToken));
+ });
+ });
+
+ describe('renderUneditableBranch', () => {
+ let origin;
+
+ beforeEach(() => {
+ origin = jest.fn().mockReturnValueOnce(originToken);
+ });
+
+ it('should return uneditable block open token followed by the origin token when entering', () => {
+ const context = { entering: true, origin };
+ const result = renderUneditableBranch({}, context);
+
+ expect(result).toStrictEqual(buildUneditableOpenTokens(originToken));
+ });
+
+ it('should return uneditable block closing token when exiting', () => {
+ const context = { entering: false, origin };
+ const result = renderUneditableBranch({}, context);
+
+ expect(result).toStrictEqual(uneditableCloseToken);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
index c33cffb421d..53e8a0e1278 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select/dropdown_value_spec.js
@@ -1,6 +1,6 @@
import { mount } from '@vue/test-utils';
-import DropdownValueComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_value.vue';
import { GlLabel } from '@gitlab/ui';
+import DropdownValueComponent from '~/vue_shared/components/sidebar/labels_select/dropdown_value.vue';
import { mockConfig, mockLabels } from './mock_data';
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
index 68c9d26bb1a..cb758797c63 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
@@ -41,23 +41,20 @@ describe('DropdownButton', () => {
describe('methods', () => {
describe('handleButtonClick', () => {
it.each`
- variant
- ${'standalone'}
- ${'embedded'}
+ variant | expectPropagationStopped
+ ${'standalone'} | ${true}
+ ${'embedded'} | ${false}
`(
- 'toggles dropdown content and stops event propagation when `state.variant` is "$variant"',
- ({ variant }) => {
+ 'toggles dropdown content and handles event propagation when `state.variant` is "$variant"',
+ ({ variant, expectPropagationStopped }) => {
const event = { stopPropagation: jest.fn() };
- wrapper = createComponent({
- ...mockConfig,
- variant,
- });
+ wrapper = createComponent({ ...mockConfig, variant });
findDropdownButton().vm.$emit('click', event);
expect(store.state.showDropdownContents).toBe(true);
- expect(event.stopPropagation).toHaveBeenCalled();
+ expect(event.stopPropagation).toHaveBeenCalledTimes(expectPropagationStopped ? 1 : 0);
},
);
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 9b01e0b9637..589be0ad7a4 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -17,53 +17,47 @@ import { mockConfig, mockLabels, mockRegularLabel } from './mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
-const createComponent = (initialState = mockConfig) => {
- const store = new Vuex.Store({
- getters,
- mutations,
- state: {
- ...defaultState(),
- footerCreateLabelTitle: 'Create label',
- footerManageLabelTitle: 'Manage labels',
- },
- actions: {
- ...actions,
- fetchLabels: jest.fn(),
- },
- });
-
- store.dispatch('setInitialState', initialState);
- store.dispatch('receiveLabelsSuccess', mockLabels);
-
- return shallowMount(DropdownContentsLabelsView, {
- localVue,
- store,
- });
-};
-
describe('DropdownContentsLabelsView', () => {
let wrapper;
- let wrapperStandalone;
- let wrapperEmbedded;
- beforeEach(() => {
- wrapper = createComponent();
- wrapperStandalone = createComponent({
- ...mockConfig,
- variant: 'standalone',
+ const createComponent = (initialState = mockConfig) => {
+ const store = new Vuex.Store({
+ getters,
+ mutations,
+ state: {
+ ...defaultState(),
+ footerCreateLabelTitle: 'Create label',
+ footerManageLabelTitle: 'Manage labels',
+ },
+ actions: {
+ ...actions,
+ fetchLabels: jest.fn(),
+ },
});
- wrapperEmbedded = createComponent({
- ...mockConfig,
- variant: 'embedded',
+
+ store.dispatch('setInitialState', initialState);
+ store.dispatch('receiveLabelsSuccess', mockLabels);
+
+ wrapper = shallowMount(DropdownContentsLabelsView, {
+ localVue,
+ store,
});
+ };
+
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
wrapper.destroy();
- wrapperStandalone.destroy();
- wrapperEmbedded.destroy();
+ wrapper = null;
});
+ const findDropdownContent = () => wrapper.find('[data-testid="dropdown-content"]');
+ const findDropdownTitle = () => wrapper.find('[data-testid="dropdown-title"]');
+ const findDropdownFooter = () => wrapper.find('[data-testid="dropdown-footer"]');
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
describe('computed', () => {
describe('visibleLabels', () => {
it('returns matching labels filtered with `searchKey`', () => {
@@ -83,6 +77,24 @@ describe('DropdownContentsLabelsView', () => {
expect(wrapper.vm.visibleLabels.length).toBe(mockLabels.length);
});
});
+
+ describe('showListContainer', () => {
+ it.each`
+ variant | loading | showList
+ ${'sidebar'} | ${false} | ${true}
+ ${'sidebar'} | ${true} | ${false}
+ ${'not-sidebar'} | ${true} | ${true}
+ ${'not-sidebar'} | ${false} | ${true}
+ `(
+ 'returns $showList if `state.variant` is "$variant" and `labelsFetchInProgress` is $loading',
+ ({ variant, loading, showList }) => {
+ createComponent({ ...mockConfig, variant });
+ wrapper.vm.$store.state.labelsFetchInProgress = loading;
+
+ expect(wrapper.vm.showListContainer).toBe(showList);
+ },
+ );
+ });
});
describe('methods', () => {
@@ -199,7 +211,7 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.$store.dispatch('requestLabels');
return wrapper.vm.$nextTick(() => {
- const loadingIconEl = wrapper.find(GlLoadingIcon);
+ const loadingIconEl = findLoadingIcon();
expect(loadingIconEl.exists()).toBe(true);
expect(loadingIconEl.attributes('class')).toContain('labels-fetch-loading');
@@ -207,22 +219,24 @@ describe('DropdownContentsLabelsView', () => {
});
it('renders dropdown title element', () => {
- const titleEl = wrapper.find('.dropdown-title > span');
+ const titleEl = findDropdownTitle();
expect(titleEl.exists()).toBe(true);
expect(titleEl.text()).toBe('Assign labels');
});
it('does not render dropdown title element when `state.variant` is "standalone"', () => {
- expect(wrapperStandalone.find('.dropdown-title').exists()).toBe(false);
+ createComponent({ ...mockConfig, variant: 'standalone' });
+ expect(findDropdownTitle().exists()).toBe(false);
});
it('renders dropdown title element when `state.variant` is "embedded"', () => {
- expect(wrapperEmbedded.find('.dropdown-title').exists()).toBe(true);
+ createComponent({ ...mockConfig, variant: 'embedded' });
+ expect(findDropdownTitle().exists()).toBe(true);
});
it('renders dropdown close button element', () => {
- const closeButtonEl = wrapper.find('.dropdown-title').find(GlButton);
+ const closeButtonEl = findDropdownTitle().find(GlButton);
expect(closeButtonEl.exists()).toBe(true);
expect(closeButtonEl.props('icon')).toBe('close');
@@ -249,8 +263,7 @@ describe('DropdownContentsLabelsView', () => {
});
return wrapper.vm.$nextTick(() => {
- const labelsEl = wrapper.findAll('.dropdown-content li');
- const labelItemEl = labelsEl.at(0).find(LabelItem);
+ const labelItemEl = findDropdownContent().find(LabelItem);
expect(labelItemEl.props('highlight')).toBe(true);
});
@@ -262,22 +275,28 @@ describe('DropdownContentsLabelsView', () => {
});
return wrapper.vm.$nextTick(() => {
- const noMatchEl = wrapper.find('.dropdown-content li');
+ const noMatchEl = findDropdownContent().find('li');
expect(noMatchEl.isVisible()).toBe(true);
expect(noMatchEl.text()).toContain('No matching results');
});
});
+ it('renders empty content while loading', () => {
+ wrapper.vm.$store.state.labelsFetchInProgress = true;
+
+ return wrapper.vm.$nextTick(() => {
+ const dropdownContent = findDropdownContent();
+
+ expect(dropdownContent.exists()).toBe(true);
+ expect(dropdownContent.isVisible()).toBe(false);
+ });
+ });
+
it('renders footer list items', () => {
- const createLabelLink = wrapper
- .find('.dropdown-footer')
- .findAll(GlLink)
- .at(0);
- const manageLabelsLink = wrapper
- .find('.dropdown-footer')
- .findAll(GlLink)
- .at(1);
+ const footerLinks = findDropdownFooter().findAll(GlLink);
+ const createLabelLink = footerLinks.at(0);
+ const manageLabelsLink = footerLinks.at(1);
expect(createLabelLink.exists()).toBe(true);
expect(createLabelLink.text()).toBe('Create label');
@@ -289,8 +308,7 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.$store.state.allowLabelCreate = false;
return wrapper.vm.$nextTick(() => {
- const createLabelLink = wrapper
- .find('.dropdown-footer')
+ const createLabelLink = findDropdownFooter()
.findAll(GlLink)
.at(0);
@@ -299,11 +317,12 @@ describe('DropdownContentsLabelsView', () => {
});
it('does not render footer list items when `state.variant` is "standalone"', () => {
- expect(wrapperStandalone.find('.dropdown-footer').exists()).toBe(false);
+ createComponent({ ...mockConfig, variant: 'standalone' });
+ expect(findDropdownFooter().exists()).toBe(false);
});
it('renders footer list items when `state.variant` is "embedded"', () => {
- expect(wrapperEmbedded.find('.dropdown-footer').exists()).toBe(true);
+ expect(findDropdownFooter().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
index bb462acf11c..97946993857 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_spec.js
@@ -10,12 +10,13 @@ import { mockConfig } from './mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
-const createComponent = (initialState = mockConfig) => {
+const createComponent = (initialState = mockConfig, propsData = {}) => {
const store = new Vuex.Store(labelsSelectModule());
store.dispatch('setInitialState', initialState);
return shallowMount(DropdownContents, {
+ propsData,
localVue,
store,
});
@@ -47,8 +48,15 @@ describe('DropdownContent', () => {
});
describe('template', () => {
- it('renders component container element with class `labels-select-dropdown-contents`', () => {
+ it('renders component container element with class `labels-select-dropdown-contents` and no styles', () => {
expect(wrapper.attributes('class')).toContain('labels-select-dropdown-contents');
+ expect(wrapper.attributes('style')).toBe(undefined);
+ });
+
+ it('renders component container element with styles when `renderOnTop` is true', () => {
+ wrapper = createComponent(mockConfig, { renderOnTop: true });
+
+ expect(wrapper.attributes('style')).toContain('bottom: 100%');
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
index 0717fd829a0..c1d9be7393c 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
@@ -1,7 +1,7 @@
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
-import { GlDeprecatedButton, GlLoadingIcon } from '@gitlab/ui';
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import DropdownTitle from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_title.vue';
import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
@@ -42,7 +42,7 @@ describe('DropdownTitle', () => {
});
it('renders edit link', () => {
- const editBtnEl = wrapper.find(GlDeprecatedButton);
+ const editBtnEl = wrapper.find(GlButton);
expect(editBtnEl.exists()).toBe(true);
expect(editBtnEl.text()).toBe('Edit');
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
index 6e97b046be2..a1e0db4d29e 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -9,9 +9,14 @@ import DropdownButton from '~/vue_shared/components/sidebar/labels_select_vue/dr
import DropdownContents from '~/vue_shared/components/sidebar/labels_select_vue/dropdown_contents.vue';
import labelsSelectModule from '~/vue_shared/components/sidebar/labels_select_vue/store';
+import { isInViewport } from '~/lib/utils/common_utils';
import { mockConfig } from './mock_data';
+jest.mock('~/lib/utils/common_utils', () => ({
+ isInViewport: jest.fn().mockReturnValue(true),
+}));
+
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -21,6 +26,9 @@ const createComponent = (config = mockConfig, slots = {}) =>
slots,
store: new Vuex.Store(labelsSelectModule()),
propsData: config,
+ stubs: {
+ 'dropdown-contents': DropdownContents,
+ },
});
describe('LabelsSelectRoot', () => {
@@ -144,5 +152,42 @@ describe('LabelsSelectRoot', () => {
expect(wrapper.find(DropdownContents).exists()).toBe(true);
});
});
+
+ describe('sets content direction based on viewport', () => {
+ it('does not set direction when `state.variant` is not "embedded"', () => {
+ wrapper.vm.$store.dispatch('toggleDropdownContents');
+
+ wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(false);
+ });
+ });
+
+ describe('when `state.variant` is "embedded"', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ ...mockConfig, variant: 'embedded' });
+ wrapper.vm.$store.dispatch('toggleDropdownContents');
+ });
+
+      it('sets direction when out of viewport', () => {
+ isInViewport.mockImplementation(() => false);
+ wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(true);
+ });
+ });
+
+      it('does not set direction when inside the viewport', () => {
+ isInViewport.mockImplementation(() => true);
+ wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(false);
+ });
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
index 072d8fe2fe2..c742220ba8a 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/store/actions_spec.js
@@ -1,10 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
import defaultState from '~/vue_shared/components/sidebar/labels_select_vue/store/state';
import * as types from '~/vue_shared/components/sidebar/labels_select_vue/store/mutation_types';
import * as actions from '~/vue_shared/components/sidebar/labels_select_vue/store/actions';
-import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
describe('LabelsSelect Actions', () => {
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
index e09bc073042..f3bd4c14717 100644
--- a/spec/frontend/vue_shared/components/split_button_spec.js
+++ b/spec/frontend/vue_shared/components/split_button_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import SplitButton from '~/vue_shared/components/split_button.vue';
@@ -25,10 +25,10 @@ describe('SplitButton', () => {
});
};
- const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdown = () => wrapper.find(GlDeprecatedDropdown);
const findDropdownItem = (index = 0) =>
findDropdown()
- .findAll(GlDropdownItem)
+ .findAll(GlDeprecatedDropdownItem)
.at(index);
const selectItem = index => {
findDropdownItem(index).vm.$emit('click');
diff --git a/spec/frontend/vue_shared/components/table_pagination_spec.js b/spec/frontend/vue_shared/components/table_pagination_spec.js
index 56ffffc7f0f..ef3ae088eec 100644
--- a/spec/frontend/vue_shared/components/table_pagination_spec.js
+++ b/spec/frontend/vue_shared/components/table_pagination_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
import { GlPagination } from '@gitlab/ui';
+import TablePagination from '~/vue_shared/components/pagination/table_pagination.vue';
describe('Pagination component', () => {
let wrapper;
diff --git a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
index 46fcb92455b..691e19473c1 100644
--- a/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
+++ b/spec/frontend/vue_shared/components/time_ago_tooltip_spec.js
@@ -1,16 +1,19 @@
import { shallowMount } from '@vue/test-utils';
-import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
describe('Time ago with tooltip component', () => {
let vm;
- const buildVm = (propsData = {}) => {
+ const buildVm = (propsData = {}, scopedSlots = {}) => {
vm = shallowMount(TimeAgoTooltip, {
propsData,
+ scopedSlots,
});
};
const timestamp = '2017-05-08T14:57:39.781Z';
+ const timeAgoTimestamp = getTimeago().format(timestamp);
afterEach(() => {
vm.destroy();
@@ -20,10 +23,9 @@ describe('Time ago with tooltip component', () => {
buildVm({
time: timestamp,
});
- const timeago = getTimeago();
expect(vm.attributes('title')).toEqual(formatDate(timestamp));
- expect(vm.text()).toEqual(timeago.format(timestamp));
+ expect(vm.text()).toEqual(timeAgoTimestamp);
});
it('should render provided html class', () => {
@@ -34,4 +36,16 @@ describe('Time ago with tooltip component', () => {
expect(vm.classes()).toContain('foo');
});
+
+ it('should render with the datetime attribute', () => {
+ buildVm({ time: timestamp });
+
+ expect(vm.attributes('datetime')).toEqual(timestamp);
+ });
+
+ it('should render provided scope content with the correct timeAgo string', () => {
+ buildVm({ time: timestamp }, { default: `<span>The time is {{ props.timeAgo }}</span>` });
+
+ expect(vm.text()).toEqual(`The time is ${timeAgoTimestamp}`);
+ });
});
diff --git a/spec/frontend/vue_shared/components/toggle_button_spec.js b/spec/frontend/vue_shared/components/toggle_button_spec.js
index 83bbb37a89a..f58647ff12b 100644
--- a/spec/frontend/vue_shared/components/toggle_button_spec.js
+++ b/spec/frontend/vue_shared/components/toggle_button_spec.js
@@ -32,7 +32,7 @@ describe('Toggle Button', () => {
it('renders input status icon', () => {
expect(vm.$el.querySelectorAll('span.toggle-icon').length).toEqual(1);
- expect(vm.$el.querySelectorAll('svg.s16.toggle-icon-svg').length).toEqual(1);
+ expect(vm.$el.querySelectorAll('svg.s18').length).toEqual(1);
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
index 1db1114f9ba..6f66d1cafb9 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlDeprecatedButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { TEST_HOST } from 'spec/test_constants';
import UserAvatarList from '~/vue_shared/components/user_avatar/user_avatar_list.vue';
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
@@ -37,7 +37,7 @@ describe('UserAvatarList', () => {
};
const clickButton = () => {
- const button = wrapper.find(GlDeprecatedButton);
+ const button = wrapper.find(GlButton);
button.vm.$emit('click');
};
@@ -112,7 +112,7 @@ describe('UserAvatarList', () => {
it('does not show button', () => {
factory();
- expect(wrapper.find(GlDeprecatedButton).exists()).toBe(false);
+ expect(wrapper.find(GlButton).exists()).toBe(false);
});
});
diff --git a/spec/frontend/vue_shared/directives/autofocusonshow_spec.js b/spec/frontend/vue_shared/directives/autofocusonshow_spec.js
index 90530b7d5c2..1c9e89f99e9 100644
--- a/spec/frontend/vue_shared/directives/autofocusonshow_spec.js
+++ b/spec/frontend/vue_shared/directives/autofocusonshow_spec.js
@@ -1,3 +1,4 @@
+import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
import autofocusonshow from '~/vue_shared/directives/autofocusonshow';
/**
@@ -6,20 +7,14 @@ import autofocusonshow from '~/vue_shared/directives/autofocusonshow';
* on underlying DOM methods.
*/
describe('AutofocusOnShow directive', () => {
+ useMockIntersectionObserver();
+
describe('with input invisible on component render', () => {
let el;
beforeEach(() => {
setFixtures('<div id="container" style="display: none;"><input id="inputel"/></div>');
el = document.querySelector('#inputel');
-
- window.IntersectionObserver = class {
- observe = jest.fn();
- };
- });
-
- afterEach(() => {
- delete window.IntersectionObserver;
});
it('should bind IntersectionObserver on input element', () => {
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
new file mode 100644
index 00000000000..a349aad9f1c
--- /dev/null
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -0,0 +1,57 @@
+import { createLocalVue, mount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlDrawer } from '@gitlab/ui';
+import App from '~/whats_new/components/app.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('App', () => {
+ let wrapper;
+ let store;
+ let actions;
+ let state;
+
+ beforeEach(() => {
+ actions = {
+ closeDrawer: jest.fn(),
+ };
+
+ state = {
+ open: true,
+ };
+
+ store = new Vuex.Store({
+ actions,
+ state,
+ });
+
+ wrapper = mount(App, {
+ localVue,
+ store,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const getDrawer = () => wrapper.find(GlDrawer);
+
+ it('contains a drawer', () => {
+ expect(getDrawer().exists()).toBe(true);
+ });
+
+ it('dispatches closeDrawer when clicking close', () => {
+ getDrawer().vm.$emit('close');
+ expect(actions.closeDrawer).toHaveBeenCalled();
+ });
+
+ it.each([true, false])('passes open property', async openState => {
+ wrapper.vm.$store.state.open = openState;
+
+ await wrapper.vm.$nextTick();
+
+ expect(getDrawer().props('open')).toBe(openState);
+ });
+});
diff --git a/spec/frontend/whats_new/components/trigger_spec.js b/spec/frontend/whats_new/components/trigger_spec.js
new file mode 100644
index 00000000000..7961957e077
--- /dev/null
+++ b/spec/frontend/whats_new/components/trigger_spec.js
@@ -0,0 +1,43 @@
+import { createLocalVue, mount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { GlButton } from '@gitlab/ui';
+import Trigger from '~/whats_new/components/trigger.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Trigger', () => {
+ let wrapper;
+ let store;
+ let actions;
+ let state;
+
+ beforeEach(() => {
+ actions = {
+ openDrawer: jest.fn(),
+ };
+
+ state = {
+ open: true,
+ };
+
+ store = new Vuex.Store({
+ actions,
+ state,
+ });
+
+ wrapper = mount(Trigger, {
+ localVue,
+ store,
+ });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+  it('dispatches openDrawer when clicking the trigger button', () => {
+ wrapper.find(GlButton).vm.$emit('click');
+ expect(actions.openDrawer).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
new file mode 100644
index 00000000000..d95453c9175
--- /dev/null
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -0,0 +1,17 @@
+import testAction from 'helpers/vuex_action_helper';
+import actions from '~/whats_new/store/actions';
+import * as types from '~/whats_new/store/mutation_types';
+
+describe('whats new actions', () => {
+ describe('openDrawer', () => {
+ it('should commit openDrawer', () => {
+ testAction(actions.openDrawer, {}, {}, [{ type: types.OPEN_DRAWER }]);
+ });
+ });
+
+ describe('closeDrawer', () => {
+ it('should commit closeDrawer', () => {
+ testAction(actions.closeDrawer, {}, {}, [{ type: types.CLOSE_DRAWER }]);
+ });
+ });
+});
diff --git a/spec/frontend/whats_new/store/mutations_spec.js b/spec/frontend/whats_new/store/mutations_spec.js
new file mode 100644
index 00000000000..3c33364fed3
--- /dev/null
+++ b/spec/frontend/whats_new/store/mutations_spec.js
@@ -0,0 +1,25 @@
+import mutations from '~/whats_new/store/mutations';
+import createState from '~/whats_new/store/state';
+import * as types from '~/whats_new/store/mutation_types';
+
+describe('whats new mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = createState;
+ });
+
+ describe('openDrawer', () => {
+ it('sets open to true', () => {
+ mutations[types.OPEN_DRAWER](state);
+ expect(state.open).toBe(true);
+ });
+ });
+
+ describe('closeDrawer', () => {
+ it('sets open to false', () => {
+ mutations[types.CLOSE_DRAWER](state);
+ expect(state.open).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js
index 7e8fb3a32ee..91d89c26ec1 100644
--- a/spec/frontend_integration/ide/ide_integration_spec.js
+++ b/spec/frontend_integration/ide/ide_integration_spec.js
@@ -8,93 +8,55 @@
*
* See https://gitlab.com/gitlab-org/gitlab/-/issues/208800 for more information.
*/
-import MockAdapter from 'axios-mock-adapter';
-import axios from '~/lib/utils/axios_utils';
+import { TEST_HOST } from 'helpers/test_constants';
+import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import { initIde } from '~/ide';
-
-jest.mock('~/api', () => {
- return {
- project: jest.fn().mockImplementation(() => new Promise(() => {})),
- };
-});
-
-jest.mock('~/ide/services/gql', () => {
- return {
- query: jest.fn().mockImplementation(() => new Promise(() => {})),
- };
-});
+import extendStore from '~/ide/stores/extend';
+
+const TEST_DATASET = {
+ emptyStateSvgPath: '/test/empty_state.svg',
+ noChangesStateSvgPath: '/test/no_changes_state.svg',
+ committedStateSvgPath: '/test/committed_state.svg',
+ pipelinesEmptyStateSvgPath: '/test/pipelines_empty_state.svg',
+ promotionSvgPath: '/test/promotion.svg',
+ ciHelpPagePath: '/test/ci_help_page',
+ webIDEHelpPagePath: '/test/web_ide_help_page',
+ clientsidePreviewEnabled: 'true',
+ renderWhitespaceInCode: 'false',
+ codesandboxBundlerUrl: 'test/codesandbox_bundler',
+};
describe('WebIDE', () => {
+ useOverclockTimers();
+
let vm;
let root;
- let mock;
- let initData;
- let location;
beforeEach(() => {
root = document.createElement('div');
- initData = {
- emptyStateSvgPath: '/test/empty_state.svg',
- noChangesStateSvgPath: '/test/no_changes_state.svg',
- committedStateSvgPath: '/test/committed_state.svg',
- pipelinesEmptyStateSvgPath: '/test/pipelines_empty_state.svg',
- promotionSvgPath: '/test/promotion.svg',
- ciHelpPagePath: '/test/ci_help_page',
- webIDEHelpPagePath: '/test/web_ide_help_page',
- clientsidePreviewEnabled: 'true',
- renderWhitespaceInCode: 'false',
- codesandboxBundlerUrl: 'test/codesandbox_bundler',
- };
+ document.body.appendChild(root);
- mock = new MockAdapter(axios);
- mock.onAny('*').reply(() => new Promise(() => {}));
-
- location = { pathname: '/-/ide/project/gitlab-test/test', search: '', hash: '' };
- Object.defineProperty(window, 'location', {
- get() {
- return location;
- },
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}/-/ide/project/gitlab-test/lorem-ipsum`,
});
});
afterEach(() => {
vm.$destroy();
vm = null;
-
- mock.restore();
+ root.remove();
});
const createComponent = () => {
const el = document.createElement('div');
- Object.assign(el.dataset, initData);
+ Object.assign(el.dataset, TEST_DATASET);
root.appendChild(el);
- vm = initIde(el);
+ vm = initIde(el, { extendStore });
};
- expect.addSnapshotSerializer({
- test(value) {
- return value instanceof HTMLElement && !value.$_hit;
- },
- print(element, serialize) {
- element.$_hit = true;
- element.querySelectorAll('[style]').forEach(el => {
- el.$_hit = true;
- if (el.style.display === 'none') {
- el.textContent = '(jest: contents hidden)';
- }
- });
-
- return serialize(element)
- .replace(/^\s*<!---->$/gm, '')
- .replace(/\n\s*\n/gm, '\n');
- },
- });
-
it('runs', () => {
createComponent();
- return vm.$nextTick().then(() => {
- expect(root).toMatchSnapshot();
- });
+ expect(root).toMatchSnapshot();
});
});
diff --git a/spec/frontend_integration/test_helpers/factories/commit.js b/spec/frontend_integration/test_helpers/factories/commit.js
new file mode 100644
index 00000000000..1ee82e74ffe
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/factories/commit.js
@@ -0,0 +1,15 @@
+import { withValues } from '../utils/obj';
+import { getCommit } from '../fixtures';
+import { createCommitId } from './commit_id';
+
+// eslint-disable-next-line import/prefer-default-export
+export const createNewCommit = ({ id = createCommitId(), message }, orig = getCommit()) => {
+ return withValues(orig, {
+ id,
+ short_id: id.substr(0, 8),
+ message,
+ title: message,
+ web_url: orig.web_url.replace(orig.id, id),
+ parent_ids: [orig.id],
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/factories/commit_id.js b/spec/frontend_integration/test_helpers/factories/commit_id.js
new file mode 100644
index 00000000000..9fa278c9dde
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/factories/commit_id.js
@@ -0,0 +1,21 @@
+const COMMIT_ID_LENGTH = 40;
+const DEFAULT_COMMIT_ID = Array(COMMIT_ID_LENGTH)
+ .fill('0')
+ .join('');
+
+export const createCommitId = (index = 0) =>
+ `${index}${DEFAULT_COMMIT_ID}`.substr(0, COMMIT_ID_LENGTH);
+
+export const createCommitIdGenerator = () => {
+ let prevCommitId = 0;
+
+ const next = () => {
+ prevCommitId += 1;
+
+ return createCommitId(prevCommitId);
+ };
+
+ return {
+ next,
+ };
+};
diff --git a/spec/frontend_integration/test_helpers/factories/index.js b/spec/frontend_integration/test_helpers/factories/index.js
new file mode 100644
index 00000000000..0f28830b236
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/factories/index.js
@@ -0,0 +1,2 @@
+export * from './commit';
+export * from './commit_id';
diff --git a/spec/frontend_integration/test_helpers/fixtures.js b/spec/frontend_integration/test_helpers/fixtures.js
new file mode 100644
index 00000000000..5f9c0e8dcba
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/fixtures.js
@@ -0,0 +1,10 @@
+/* eslint-disable global-require */
+import { memoize } from 'lodash';
+
+export const getProject = () => require('test_fixtures/api/projects/get.json');
+export const getBranch = () => require('test_fixtures/api/projects/branches/get.json');
+export const getMergeRequests = () => require('test_fixtures/api/merge_requests/get.json');
+export const getRepositoryFiles = () => require('test_fixtures/projects_json/files.json');
+export const getPipelinesEmptyResponse = () =>
+ require('test_fixtures/projects_json/pipelines_empty.json');
+export const getCommit = memoize(() => getBranch().commit);
diff --git a/spec/frontend_integration/test_helpers/mock_server/graphql.js b/spec/frontend_integration/test_helpers/mock_server/graphql.js
new file mode 100644
index 00000000000..6dcc4798378
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/graphql.js
@@ -0,0 +1,21 @@
+import { buildSchema, graphql } from 'graphql';
+import gitlabSchemaStr from '../../../../doc/api/graphql/reference/gitlab_schema.graphql';
+
+const graphqlSchema = buildSchema(gitlabSchemaStr.loc.source.body);
+const graphqlResolvers = {
+ project({ fullPath }, schema) {
+ const result = schema.projects.findBy({ path_with_namespace: fullPath });
+ const userPermission = schema.db.userPermissions[0];
+
+ return {
+ ...result.attrs,
+ userPermissions: {
+ ...userPermission,
+ },
+ };
+ },
+};
+
+// eslint-disable-next-line import/prefer-default-export
+export const graphqlQuery = (query, variables, schema) =>
+ graphql(graphqlSchema, query, graphqlResolvers, schema, variables);
diff --git a/spec/frontend_integration/test_helpers/mock_server/index.js b/spec/frontend_integration/test_helpers/mock_server/index.js
new file mode 100644
index 00000000000..b3979d05ea5
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/index.js
@@ -0,0 +1,45 @@
+import { Server, Model, RestSerializer } from 'miragejs';
+import { getProject, getBranch, getMergeRequests, getRepositoryFiles } from 'test_helpers/fixtures';
+import setupRoutes from './routes';
+
+export const createMockServerOptions = () => ({
+ models: {
+ project: Model,
+ branch: Model,
+ mergeRequest: Model,
+ file: Model,
+ userPermission: Model,
+ },
+ serializers: {
+ application: RestSerializer.extend({
+ root: false,
+ }),
+ },
+ seeds(schema) {
+ schema.db.loadData({
+ files: getRepositoryFiles().map(path => ({ path })),
+ projects: [getProject()],
+ branches: [getBranch()],
+ mergeRequests: getMergeRequests(),
+ userPermissions: [
+ {
+ createMergeRequestIn: true,
+ readMergeRequest: true,
+ pushCode: true,
+ },
+ ],
+ });
+ },
+ routes() {
+ this.namespace = '';
+ this.urlPrefix = '/';
+
+ setupRoutes(this);
+ },
+});
+
+export const createMockServer = () => {
+ const server = new Server(createMockServerOptions());
+
+ return server;
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/404.js b/spec/frontend_integration/test_helpers/mock_server/routes/404.js
new file mode 100644
index 00000000000..9e08016577b
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/404.js
@@ -0,0 +1,7 @@
+import { Response } from 'miragejs';
+
+export default server => {
+ ['get', 'post', 'put', 'delete', 'patch'].forEach(method => {
+ server[method]('*', () => {
+ return new Response(404);
+ });
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/ci.js b/spec/frontend_integration/test_helpers/mock_server/routes/ci.js
new file mode 100644
index 00000000000..83951f09c56
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/ci.js
@@ -0,0 +1,11 @@
+import { getPipelinesEmptyResponse } from 'test_helpers/fixtures';
+
+export default server => {
+ server.get('*/commit/:id/pipelines', () => {
+ return getPipelinesEmptyResponse();
+ });
+
+ server.get('/api/v4/projects/:id/runners', () => {
+ return [];
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js b/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js
new file mode 100644
index 00000000000..ebb5415ba97
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/graphql.js
@@ -0,0 +1,11 @@
+import { graphqlQuery } from '../graphql';
+
+export default server => {
+ server.post('/api/graphql', (schema, request) => {
+ const batches = JSON.parse(request.requestBody);
+
+ return Promise.all(
+ batches.map(({ query, variables }) => graphqlQuery(query, variables, schema)),
+ );
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/index.js b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
new file mode 100644
index 00000000000..eea196b5158
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
@@ -0,0 +1,12 @@
+/* eslint-disable global-require */
+export default server => {
+ [
+ require('./graphql'),
+ require('./projects'),
+ require('./repository'),
+ require('./ci'),
+ require('./404'),
+ ].forEach(({ default: setup }) => {
+ setup(server);
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/projects.js b/spec/frontend_integration/test_helpers/mock_server/routes/projects.js
new file mode 100644
index 00000000000..f4d8ce4b23d
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/projects.js
@@ -0,0 +1,23 @@
+import { withKeys } from 'test_helpers/utils/obj';
+
+export default server => {
+ server.get('/api/v4/projects/:id', (schema, request) => {
+ const { id } = request.params;
+
+ const proj =
+ schema.projects.findBy({ id }) ?? schema.projects.findBy({ path_with_namespace: id });
+
+ return proj.attrs;
+ });
+
+ server.get('/api/v4/projects/:id/merge_requests', (schema, request) => {
+ const result = schema.mergeRequests.where(
+ withKeys(request.queryParams, {
+ source_project_id: 'project_id',
+ source_branch: 'source_branch',
+ }),
+ );
+
+ return result.models;
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/repository.js b/spec/frontend_integration/test_helpers/mock_server/routes/repository.js
new file mode 100644
index 00000000000..c5e91c9e87e
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/repository.js
@@ -0,0 +1,38 @@
+import { createNewCommit, createCommitIdGenerator } from 'test_helpers/factories';
+
+export default server => {
+ const commitIdGenerator = createCommitIdGenerator();
+
+ server.get('/api/v4/projects/:id/repository/branches', schema => {
+ return schema.db.branches;
+ });
+
+ server.get('/api/v4/projects/:id/repository/branches/:name', (schema, request) => {
+ const { name } = request.params;
+
+ const branch = schema.branches.findBy({ name });
+
+ return branch.attrs;
+ });
+
+ server.get('*/-/files/:id', schema => {
+ return schema.db.files.map(({ path }) => path);
+ });
+
+ server.post('/api/v4/projects/:id/repository/commits', (schema, request) => {
+ const { branch: branchName, commit_message: message, actions } = JSON.parse(
+ request.requestBody,
+ );
+
+ const branch = schema.branches.findBy({ name: branchName });
+
+ const commit = {
+ ...createNewCommit({ id: commitIdGenerator.next(), message }, branch.attrs.commit),
+ __actions: actions,
+ };
+
+ branch.update({ commit });
+
+ return commit;
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/use.js b/spec/frontend_integration/test_helpers/mock_server/use.js
new file mode 100644
index 00000000000..84597d57584
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/use.js
@@ -0,0 +1,5 @@
+import { createMockServer } from './index';
+
+if (process.env.NODE_ENV === 'development') {
+ window.mockServer = createMockServer();
+}
diff --git a/spec/frontend_integration/test_helpers/setup/index.js b/spec/frontend_integration/test_helpers/setup/index.js
new file mode 100644
index 00000000000..ba1d256e16e
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/setup/index.js
@@ -0,0 +1,5 @@
+import '../../../frontend/test_setup';
+import './setup_globals';
+import './setup_axios';
+import './setup_serializers';
+import './setup_mock_server';
diff --git a/spec/frontend_integration/test_helpers/setup/setup_axios.js b/spec/frontend_integration/test_helpers/setup/setup_axios.js
new file mode 100644
index 00000000000..efdaf8016f2
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/setup/setup_axios.js
@@ -0,0 +1,5 @@
+import adapter from 'axios/lib/adapters/xhr';
+import axios from '~/lib/utils/axios_utils';
+
+// Replace the default test adapter with the real XHR adapter so requests can be intercepted by the mock server
+axios.defaults.adapter = adapter;
diff --git a/spec/frontend_integration/test_helpers/setup/setup_globals.js b/spec/frontend_integration/test_helpers/setup/setup_globals.js
new file mode 100644
index 00000000000..2b0e8f76c3c
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/setup/setup_globals.js
@@ -0,0 +1,15 @@
+import { setTestTimeout } from 'helpers/timeout';
+
+beforeEach(() => {
+ window.gon = {
+ api_version: 'v4',
+ relative_url_root: '',
+ };
+
+ setTestTimeout(5000);
+ jest.useRealTimers();
+});
+
+afterEach(() => {
+ jest.useFakeTimers();
+});
diff --git a/spec/frontend_integration/test_helpers/setup/setup_mock_server.js b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
new file mode 100644
index 00000000000..343aeebf88e
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/setup/setup_mock_server.js
@@ -0,0 +1,13 @@
+import { createMockServer } from '../mock_server';
+
+beforeEach(() => {
+ const server = createMockServer();
+ server.logging = false;
+
+ global.mockServer = server;
+});
+
+afterEach(() => {
+ global.mockServer.shutdown();
+ global.mockServer = null;
+});
diff --git a/spec/frontend_integration/test_helpers/setup/setup_serializers.js b/spec/frontend_integration/test_helpers/setup/setup_serializers.js
new file mode 100644
index 00000000000..6c1de853129
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/setup/setup_serializers.js
@@ -0,0 +1,3 @@
+import defaultSerializer from '../snapshot_serializer';
+
+expect.addSnapshotSerializer(defaultSerializer);
diff --git a/spec/frontend_integration/test_helpers/snapshot_serializer.js b/spec/frontend_integration/test_helpers/snapshot_serializer.js
new file mode 100644
index 00000000000..8c4f95a9156
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/snapshot_serializer.js
@@ -0,0 +1,18 @@
+export default {
+ test(value) {
+ return value instanceof HTMLElement && !value.$_hit;
+ },
+ print(element, serialize) {
+ element.$_hit = true;
+ element.querySelectorAll('[style]').forEach(el => {
+ el.$_hit = true;
+ if (el.style.display === 'none') {
+ el.textContent = '(jest: contents hidden)';
+ }
+ });
+
+ return serialize(element)
+ .replace(/^\s*<!---->$/gm, '')
+ .replace(/\n\s*\n/gm, '\n');
+ },
+};
diff --git a/spec/frontend_integration/test_helpers/utils/obj.js b/spec/frontend_integration/test_helpers/utils/obj.js
new file mode 100644
index 00000000000..6c301798489
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/utils/obj.js
@@ -0,0 +1,36 @@
+import { has, mapKeys, pick } from 'lodash';
+
+/**
+ * This method is used to type-safely set values on the given object
+ *
+ * @template T
+ * @returns {T} A shallow copy of `source`, with the values from `values`
+ * @throws {Error} If `values` contains a key that isn't already on `source`
+ * @param {T} source
+ * @param {Object} values
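+ *
+ * @example
+ * // as covered in obj_spec.js, returns { a: '123', b: 789 }
+ * withValues({ a: '123', b: 456 }, { b: 789 })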
+ */
+export const withValues = (source, values) =>
+ Object.entries(values).reduce(
+ (acc, [key, value]) => {
+ if (!has(acc, key)) {
+ throw new Error(
+ `[mock_server] Cannot write property that does not exist on object '${key}'`,
+ );
+ }
+
+ return {
+ ...acc,
+ [key]: value,
+ };
+ },
+ { ...source },
+ );
+
+/**
+ * This method returns a subset of the given object and maps the key names based on the
+ * given `map`.
+ *
+ * @param {Object} obj The source object.
+ * @param {Object} map The object which contains the keys to use and mapped key names.
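+ *
+ * @example
+ * // as covered in obj_spec.js, returns { lorem: 456, ipsum: 'd' }
+ * withKeys({ a: '123', b: 456, c: 'd' }, { b: 'lorem', c: 'ipsum' })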
+ */
+export const withKeys = (obj, map) => mapKeys(pick(obj, Object.keys(map)), (val, key) => map[key]);
diff --git a/spec/frontend_integration/test_helpers/utils/obj_spec.js b/spec/frontend_integration/test_helpers/utils/obj_spec.js
new file mode 100644
index 00000000000..0ad7b4a1a4c
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/utils/obj_spec.js
@@ -0,0 +1,23 @@
+import { withKeys, withValues } from './obj';
+
+describe('frontend_integration/test_helpers/utils/obj', () => {
+ describe('withKeys', () => {
+ it('picks and maps keys', () => {
+ expect(withKeys({ a: '123', b: 456, c: 'd' }, { b: 'lorem', c: 'ipsum', z: 'zed ' })).toEqual(
+ { lorem: 456, ipsum: 'd' },
+ );
+ });
+ });
+
+ describe('withValues', () => {
+ it('sets values', () => {
+ expect(withValues({ a: '123', b: 456 }, { b: 789 })).toEqual({ a: '123', b: 789 });
+ });
+
+ it('throws if values has non-existent key', () => {
+ expect(() => withValues({ a: '123', b: 456 }, { b: 789, bogus: 'throws' })).toThrow(
+ `[mock_server] Cannot write property that does not exist on object 'bogus'`,
+ );
+ });
+ });
+});
diff --git a/spec/frontend_integration/test_helpers/utils/overclock_timers.js b/spec/frontend_integration/test_helpers/utils/overclock_timers.js
new file mode 100644
index 00000000000..046c7f8e527
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/utils/overclock_timers.js
@@ -0,0 +1,65 @@
+/**
+ * This function replaces the existing `setTimeout` and `setInterval` with wrappers that
+ * divide the `ms` passed in by `boost`.
+ *
+ * For example, if a module has:
+ *
+ * ```
+ * setTimeout(cb, 100);
+ * ```
+ *
+ * But a test has:
+ *
+ * ```
+ * useOverclockTimers(25);
+ * ```
+ *
+ * Then the module's call to `setTimeout` effectively becomes:
+ *
+ * ```
+ * setTimeout(cb, 4);
+ * ```
+ *
+ * Note that the timing of `setTimeout` and the order of execution are non-deterministic, and
+ * shrinking the `ms` values passed in can make this very obvious and expose underlying issues
+ * as flaky failures.
+ *
+ * WARNING: If flaky spec failures show up in a spec that is using this helper, please consider either:
+ *
+ * - Refactoring the production code so that it's reactive to state changes, not dependent on timers.
+ * - Removing the call to this helper from the spec.
+ *
+ * @param {Number} boost
+ */
+// eslint-disable-next-line import/prefer-default-export
+export const useOverclockTimers = (boost = 50) => {
+ if (boost <= 0) {
+ throw new Error(`[overclock_timers] boost (${boost}) cannot be <= 0`);
+ }
+
+ let origSetTimeout;
+ let origSetInterval;
+ const newSetTimeout = (fn, msParam = 0) => {
+ const ms = msParam > 0 ? Math.floor(msParam / boost) : msParam;
+
+ return origSetTimeout(fn, ms);
+ };
+ const newSetInterval = (fn, msParam = 0) => {
+ const ms = msParam > 0 ? Math.floor(msParam / boost) : msParam;
+
+ return origSetInterval(fn, ms);
+ };
+
+ beforeEach(() => {
+ origSetTimeout = global.setTimeout;
+ origSetInterval = global.setInterval;
+
+ global.setTimeout = newSetTimeout;
+ global.setInterval = newSetInterval;
+ });
+
+ afterEach(() => {
+ global.setTimeout = origSetTimeout;
+ global.setInterval = origSetInterval;
+ });
+};
diff --git a/spec/frontend_integration/test_setup.js b/spec/frontend_integration/test_setup.js
new file mode 100644
index 00000000000..8db22c56245
--- /dev/null
+++ b/spec/frontend_integration/test_setup.js
@@ -0,0 +1 @@
+import './test_helpers/setup';
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 6e5a8b9f4be..e40c44925e2 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -257,6 +257,7 @@ RSpec.describe 'Gitlab::Graphql::Authorization' do
type.field :id, GraphQL::ID_TYPE, null: false
end
end
+
let(:project_type) do |type|
type_factory do |type|
type.graphql_name 'FakeProjectType'
@@ -264,11 +265,13 @@ RSpec.describe 'Gitlab::Graphql::Authorization' do
resolve: -> (_, _, _) { Issue.where(project: [visible_project, other_project]).order(id: :asc) }
end
end
+
let(:query_type) do
query_factory do |query|
query.field :test_project, project_type, null: false, resolve: -> (_, _, _) { visible_project }
end
end
+
let(:query_string) do
<<~QRY
{ testProject { testIssues(first: 3) { edges { node { id } } } } }
diff --git a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
new file mode 100644
index 00000000000..71c43ed826c
--- /dev/null
+++ b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Boards::Issues::IssueMoveList do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:testing) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:list1) { create(:list, board: board, label: development, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: testing, position: 1) }
+ let_it_be(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+ let_it_be(:existing_issue1) { create(:labeled_issue, project: project, labels: [testing], relative_position: 10) }
+ let_it_be(:existing_issue2) { create(:labeled_issue, project: project, labels: [testing], relative_position: 50) }
+
+ let(:current_user) { user }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+ let(:params) { { board: board, project_path: project.full_path, iid: issue1.iid } }
+ let(:move_params) do
+ {
+ from_list_id: list1.id,
+ to_list_id: list2.id,
+ move_before_id: existing_issue2.id,
+ move_after_id: existing_issue1.id
+ }
+ end
+
+ before_all do
+ group.add_maintainer(user)
+ group.add_guest(guest)
+ end
+
+ subject do
+ mutation.resolve(params.merge(move_params))
+ end
+
+ describe '#ready?' do
+ it 'raises an error if required arguments are missing' do
+ expect { mutation.ready?(params) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "At least one of the arguments " \
+ "fromListId, toListId, afterId or beforeId is required")
+ end
+
+ it 'raises an error if only one of fromListId and toListId is present' do
+ expect { mutation.ready?(params.merge(from_list_id: list1.id)) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError,
+ 'Both fromListId and toListId must be present'
+ )
+ end
+ end
+
+ describe '#resolve' do
+    context 'when user has access to resources' do
+ it 'moves and repositions issue' do
+ subject
+
+ expect(issue1.reload.labels).to eq([testing])
+ expect(issue1.relative_position).to be < existing_issue2.relative_position
+ expect(issue1.relative_position).to be > existing_issue1.relative_position
+ end
+ end
+
+    context 'when user has no access to resources' do
+ shared_examples 'raises a resource not available error' do
+ it { expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) }
+ end
+
+ context 'when user cannot update issue' do
+ let(:current_user) { guest }
+
+ it_behaves_like 'raises a resource not available error'
+ end
+
+ context 'when user cannot access board' do
+ let(:board) { create(:board, group: create(:group, :private)) }
+
+ it_behaves_like 'raises a resource not available error'
+ end
+
+ context 'when passing board_id as nil' do
+ let(:board) { nil }
+
+ it_behaves_like 'raises a resource not available error'
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/boards/lists/create_spec.rb b/spec/graphql/mutations/boards/lists/create_spec.rb
new file mode 100644
index 00000000000..1a881ac81e8
--- /dev/null
+++ b/spec/graphql/mutations/boards/lists/create_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Boards::Lists::Create do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+
+ let(:current_user) { user }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+ let(:list_create_params) { {} }
+
+ before_all do
+ group.add_reporter(user)
+ group.add_guest(guest)
+ end
+
+ subject { mutation.resolve(board_id: board.to_global_id.to_s, **list_create_params) }
+
+ describe '#ready?' do
+ it 'raises an error if required arguments are missing' do
+ expect { mutation.ready?({ board_id: 'some id' }) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError,
+ 'one and only one of backlog or labelId is required')
+ end
+
+ it 'raises an error if too many required arguments are specified' do
+ expect { mutation.ready?({ board_id: 'some id', backlog: true, label_id: 'some label' }) }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError,
+ 'one and only one of backlog or labelId is required')
+ end
+ end
+
+ describe '#resolve' do
+ context 'with proper permissions' do
+ describe 'backlog list' do
+ let(:list_create_params) { { backlog: true } }
+
+ it 'creates one and only one backlog' do
+ expect { subject }.to change { board.lists.backlog.count }.from(0).to(1)
+ expect(board.lists.backlog.first.list_type).to eq 'backlog'
+
+ backlog_id = board.lists.backlog.first.id
+
+ expect { subject }.not_to change { board.lists.backlog.count }
+ expect(board.lists.backlog.last.id).to eq backlog_id
+ end
+ end
+
+ describe 'label list' do
+ let_it_be(:dev_label) do
+ create(:group_label, title: 'Development', color: '#FFAABB', group: group)
+ end
+
+ let(:list_create_params) { { label_id: dev_label.to_global_id.to_s } }
+
+ it 'creates a new issue board list for labels' do
+ expect { subject }.to change { board.lists.count }.from(1).to(2)
+
+ new_list = subject[:list]
+
+ expect(new_list.title).to eq dev_label.title
+ expect(new_list.position).to eq 0
+ end
+ end
+ end
+
+ context 'without proper permissions' do
+ let(:current_user) { guest }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/boards/lists/update_spec.rb b/spec/graphql/mutations/boards/lists/update_spec.rb
new file mode 100644
index 00000000000..d5d8a2af6bf
--- /dev/null
+++ b/spec/graphql/mutations/boards/lists/update_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Boards::Lists::Update do
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
+ let_it_be(:list2) { create(:list, board: board) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+ let(:list_update_params) { { position: 1, collapsed: true } }
+
+ before_all do
+ group.add_reporter(reporter)
+ group.add_guest(guest)
+ list.update_preferences_for(reporter, collapsed: false)
+ end
+
+ subject { mutation.resolve(list: list, **list_update_params) }
+
+ describe '#resolve' do
+ context 'with permission to admin board lists' do
+ let(:current_user) { reporter }
+
+ it 'updates the list position and collapsed state as expected' do
+ subject
+
+ reloaded_list = list.reload
+ expect(reloaded_list.position).to eq(1)
+ expect(reloaded_list.collapsed?(current_user)).to eq(true)
+ end
+ end
+
+ context 'with permission to read board lists' do
+ let(:current_user) { guest }
+
+ it 'updates the list collapsed state but not the list position' do
+ subject
+
+ reloaded_list = list.reload
+ expect(reloaded_list.position).to eq(0)
+ expect(reloaded_list.collapsed?(current_user)).to eq(true)
+ end
+ end
+
+ context 'without permission to read board lists' do
+ let(:current_user) { create(:user) }
+
+ it 'raises Resource Not Found error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/commits/create_spec.rb b/spec/graphql/mutations/commits/create_spec.rb
index bb0b8c577b0..fb1baafe7bd 100644
--- a/spec/graphql/mutations/commits/create_spec.rb
+++ b/spec/graphql/mutations/commits/create_spec.rb
@@ -147,7 +147,7 @@ RSpec.describe Mutations::Commits::Create do
it 'returns errors' do
expect(mutated_commit).to be_nil
- expect(subject[:errors]).to eq(['3:UserCommitFiles: empty CommitMessage'])
+ expect(subject[:errors].to_s).to match(/3:UserCommitFiles: empty CommitMessage/)
end
end
diff --git a/spec/graphql/mutations/design_management/move_spec.rb b/spec/graphql/mutations/design_management/move_spec.rb
new file mode 100644
index 00000000000..7519347d07c
--- /dev/null
+++ b/spec/graphql/mutations/design_management/move_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::DesignManagement::Move do
+ include DesignManagementTestHelpers
+
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:designs) { create_list(:design, 3, issue: issue) }
+ let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
+
+ let(:user) { developer }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+
+ before do
+ enable_design_management
+ end
+
+ describe "#resolve" do
+ subject(:resolve) do
+ args = {
+ current_design: current_design.to_global_id,
+ previous_design: previous_design&.to_global_id,
+ next_design: next_design&.to_global_id
+ }.compact
+
+ mutation.resolve(args)
+ end
+
+ shared_examples "resource not available" do
+ it "raises an error" do
+ expect { resolve }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when the feature is not available' do
+ before do
+ enable_design_management(false)
+ end
+
+ it_behaves_like 'resource not available'
+ end
+
+ %i[current_design previous_design next_design].each do |binding|
+ context "When #{binding} cannot be found" do
+ let(binding) { build_stubbed(:design) }
+
+ it_behaves_like 'resource not available'
+ end
+ end
+
+ context 'the service runs' do
+ before do
+ expect_next_instance_of(::DesignManagement::MoveDesignsService) do |service|
+ expect(service).to receive(:execute).and_return(service_result)
+ end
+ end
+
+ context 'raising an error' do
+ let(:service_result) { ServiceResponse.error(message: 'bang!') }
+
+ it 'reports the service-level error' do
+ expect(resolve).to include(errors: ['bang!'], design_collection: eq(issue.design_collection))
+ end
+ end
+
+ context 'successfully' do
+ let(:service_result) { ServiceResponse.success }
+
+      it 'returns the design collection with no errors' do
+ expect(resolve).to include(errors: be_empty, design_collection: eq(issue.design_collection))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/issues/set_assignees_spec.rb b/spec/graphql/mutations/issues/set_assignees_spec.rb
new file mode 100644
index 00000000000..77ba511b715
--- /dev/null
+++ b/spec/graphql/mutations/issues/set_assignees_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Issues::SetAssignees do
+ it_behaves_like 'an assignable resource' do
+ let_it_be(:resource, reload: true) { create(:issue) }
+ end
+end
diff --git a/spec/graphql/mutations/issues/set_subscription_spec.rb b/spec/graphql/mutations/issues/set_subscription_spec.rb
new file mode 100644
index 00000000000..9e05a136c0b
--- /dev/null
+++ b/spec/graphql/mutations/issues/set_subscription_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Issues::SetSubscription do
+ it_behaves_like 'a subscribeable graphql resource' do
+ let_it_be(:resource) { create(:issue) }
+ let(:permission_name) { :update_issue }
+ end
+end
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index 9a847476e2e..15c15afd9b7 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -3,16 +3,23 @@
require 'spec_helper'
RSpec.describe Mutations::Issues::Update do
- let(:issue) { create(:issue) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_label) { create(:label, project: project) }
+ let_it_be(:issue) { create(:issue, project: project, labels: [project_label]) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
let(:expected_attributes) do
{
title: 'new title',
description: 'new description',
confidential: true,
- due_date: Date.tomorrow
+ due_date: Date.tomorrow,
+ discussion_locked: true,
+ milestone_id: milestone.id
}
end
+
let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
let(:mutated_issue) { subject[:issue] }
@@ -21,20 +28,22 @@ RSpec.describe Mutations::Issues::Update do
describe '#resolve' do
let(:mutation_params) do
{
- project_path: issue.project.full_path,
+ project_path: project.full_path,
iid: issue.iid
}.merge(expected_attributes)
end
subject { mutation.resolve(mutation_params) }
- it 'raises an error if the resource is not accessible to the user' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ context 'when the user cannot access the issue' do
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
end
context 'when the user can update the issue' do
before do
- issue.project.add_developer(user)
+ project.add_developer(user)
end
it 'updates issue with correct values' do
@@ -50,6 +59,62 @@ RSpec.describe Mutations::Issues::Update do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
+
+ context 'when setting milestone to nil' do
+ let(:expected_attributes) { { milestone_id: nil } }
+
+      it 'changes the milestone correctly' do
+ issue.update_column(:milestone_id, milestone.id)
+
+ expect { subject }.to change { issue.reload.milestone }.from(milestone).to(nil)
+ end
+ end
+
+ context 'when changing labels' do
+ let_it_be(:label_1) { create(:label, project: project) }
+ let_it_be(:label_2) { create(:label, project: project) }
+ let_it_be(:external_label) { create(:label, project: create(:project)) }
+
+ it 'adds and removes labels correctly' do
+ mutation_params[:add_label_ids] = [label_1.id, label_2.id]
+ mutation_params[:remove_label_ids] = [project_label.id]
+
+ subject
+
+ expect(issue.reload.labels).to match_array([label_1, label_2])
+ end
+
+ it 'does not add label if label id is nil' do
+ mutation_params[:add_label_ids] = [nil, label_2.id]
+
+ subject
+
+ expect(issue.reload.labels).to match_array([project_label, label_2])
+ end
+
+ it 'does not add label if label is not found' do
+ mutation_params[:add_label_ids] = [external_label.id, label_2.id]
+
+ subject
+
+ expect(issue.reload.labels).to match_array([project_label, label_2])
+ end
+
+      it 'does not modify labels if label is already present' do
+        mutation_params[:add_label_ids] = [project_label.id]
+
+        subject
+
+        expect(issue.reload.labels).to match_array([project_label])
+      end
+
+      it 'does not modify labels if label is added and removed in the same request' do
+ mutation_params[:add_label_ids] = [label_1.id, label_2.id]
+ mutation_params[:remove_label_ids] = [label_1.id]
+
+ subject
+
+ expect(issue.reload.labels).to match_array([project_label, label_2])
+ end
+ end
end
end
end
diff --git a/spec/graphql/mutations/merge_requests/create_spec.rb b/spec/graphql/mutations/merge_requests/create_spec.rb
index ae31790f1f9..ba0ac3cbe66 100644
--- a/spec/graphql/mutations/merge_requests/create_spec.rb
+++ b/spec/graphql/mutations/merge_requests/create_spec.rb
@@ -22,7 +22,8 @@ RSpec.describe Mutations::MergeRequests::Create do
title: title,
source_branch: source_branch,
target_branch: target_branch,
- description: description
+ description: description,
+ labels: labels
)
end
@@ -30,6 +31,7 @@ RSpec.describe Mutations::MergeRequests::Create do
let(:source_branch) { 'feature' }
let(:target_branch) { 'master' }
let(:description) { nil }
+ let(:labels) { nil }
let(:mutated_merge_request) { subject[:merge_request] }
@@ -70,6 +72,15 @@ RSpec.describe Mutations::MergeRequests::Create do
end
end
+ context 'when optional labels field is set' do
+ let(:labels) { %w[label-1 label-2] }
+
+ it 'returns a new merge request with labels' do
+ expect(mutated_merge_request.labels.map(&:title)).to eq(labels)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
context 'when service cannot create a merge request' do
let(:title) { nil }
diff --git a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
index 0e7abb849c4..4ac40fc09c6 100644
--- a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -3,106 +3,7 @@
require 'spec_helper'
RSpec.describe Mutations::MergeRequests::SetAssignees do
- let(:merge_request) { create(:merge_request) }
- let(:user) { create(:user) }
-
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
-
- describe '#resolve' do
- let(:assignee) { create(:user) }
- let(:assignee2) { create(:user) }
- let(:assignee_usernames) { [assignee.username] }
- let(:mutated_merge_request) { subject[:merge_request] }
-
- subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: assignee_usernames) }
-
- before do
- merge_request.project.add_developer(assignee)
- merge_request.project.add_developer(assignee2)
- end
-
- it 'raises an error if the resource is not accessible to the user' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
-
- context 'when the user can update the merge request' do
- before do
- merge_request.project.add_developer(user)
- end
-
- it 'replaces the assignee' do
- merge_request.assignees = [assignee2]
- merge_request.save!
-
- expect(mutated_merge_request).to eq(merge_request)
- expect(mutated_merge_request.assignees).to contain_exactly(assignee)
- expect(subject[:errors]).to be_empty
- end
-
- it 'returns errors merge request could not be updated' do
- # Make the merge request invalid
- merge_request.allow_broken = true
- merge_request.update!(source_project: nil)
-
- expect(subject[:errors]).not_to be_empty
- end
-
- context 'when passing an empty assignee list' do
- let(:assignee_usernames) { [] }
-
- before do
- merge_request.assignees = [assignee]
- merge_request.save!
- end
-
- it 'removes all assignees' do
- expect(mutated_merge_request).to eq(merge_request)
- expect(mutated_merge_request.assignees).to eq([])
- expect(subject[:errors]).to be_empty
- end
- end
-
- context 'when passing "append" as true' do
- subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: assignee_usernames, operation_mode: Types::MutationOperationModeEnum.enum[:append]) }
-
- before do
- merge_request.assignees = [assignee2]
- merge_request.save!
-
- # In CE, APPEND is a NOOP as you can't have multiple assignees
- # We test multiple assignment in EE specs
- stub_licensed_features(multiple_merge_request_assignees: false)
- end
-
- it 'is a NO-OP in FOSS' do
- expect(mutated_merge_request).to eq(merge_request)
- expect(mutated_merge_request.assignees).to contain_exactly(assignee2)
- expect(subject[:errors]).to be_empty
- end
- end
-
- context 'when passing "remove" as true' do
- before do
- merge_request.assignees = [assignee]
- merge_request.save!
- end
-
- it 'removes named assignee' do
- mutated_merge_request = mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: assignee_usernames, operation_mode: Types::MutationOperationModeEnum.enum[:remove])[:merge_request]
-
- expect(mutated_merge_request).to eq(merge_request)
- expect(mutated_merge_request.assignees).to eq([])
- expect(subject[:errors]).to be_empty
- end
-
- it 'does not remove unnamed assignee' do
- mutated_merge_request = mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: [assignee2.username], operation_mode: Types::MutationOperationModeEnum.enum[:remove])[:merge_request]
-
- expect(mutated_merge_request).to eq(merge_request)
- expect(mutated_merge_request.assignees).to contain_exactly(assignee)
- expect(subject[:errors]).to be_empty
- end
- end
- end
+ it_behaves_like 'an assignable resource' do
+ let_it_be(:resource, reload: true) { create(:merge_request) }
end
end
diff --git a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
index 20cfed9dd3d..600053637c9 100644
--- a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -3,44 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::MergeRequests::SetSubscription do
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.project }
- let(:user) { create(:user) }
-
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
-
- specify { expect(described_class).to require_graphql_authorizations(:update_merge_request) }
-
- describe '#resolve' do
- let(:subscribe) { true }
- let(:mutated_merge_request) { subject[:merge_request] }
-
- subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, subscribed_state: subscribe) }
-
- it 'raises an error if the resource is not accessible to the user' do
- expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
-
- context 'when the user can update the merge request' do
- before do
- merge_request.project.add_developer(user)
- end
-
- it 'returns the merge request as discussion locked' do
- expect(mutated_merge_request).to eq(merge_request)
- expect(mutated_merge_request.subscribed?(user, project)).to eq(true)
- expect(subject[:errors]).to be_empty
- end
-
- context 'when passing subscribe as false' do
- let(:subscribe) { false }
-
- it 'unsubscribes from the discussion' do
- merge_request.subscribe(user, project)
-
- expect(mutated_merge_request.subscribed?(user, project)).to eq(false)
- end
- end
- end
+ it_behaves_like 'a subscribeable graphql resource' do
+ let_it_be(:resource) { create(:merge_request) }
+ let(:permission_name) { :update_merge_request }
end
end
diff --git a/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb b/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb
index 0c1ba5aab2c..42830f0024d 100644
--- a/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/alert_resolver_spec.rb
@@ -7,8 +7,8 @@ RSpec.describe Resolvers::AlertManagement::AlertResolver do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:alert_1) { create(:alert_management_alert, :resolved, project: project, ended_at: 1.year.ago, events: 2, severity: :high) }
- let_it_be(:alert_2) { create(:alert_management_alert, :ignored, project: project, events: 1, severity: :critical) }
+ let_it_be(:resolved_alert) { create(:alert_management_alert, :resolved, project: project, ended_at: 1.year.ago, events: 2, severity: :high) }
+ let_it_be(:ignored_alert) { create(:alert_management_alert, :ignored, project: project, events: 1, severity: :critical) }
let_it_be(:alert_other_proj) { create(:alert_management_alert) }
let(:args) { {} }
@@ -24,18 +24,18 @@ RSpec.describe Resolvers::AlertManagement::AlertResolver do
project.add_developer(current_user)
end
- it { is_expected.to contain_exactly(alert_1, alert_2) }
+ it { is_expected.to contain_exactly(resolved_alert, ignored_alert) }
context 'finding by iid' do
- let(:args) { { iid: alert_1.iid } }
+ let(:args) { { iid: resolved_alert.iid } }
- it { is_expected.to contain_exactly(alert_1) }
+ it { is_expected.to contain_exactly(resolved_alert) }
end
context 'finding by status' do
let(:args) { { status: [Types::AlertManagement::StatusEnum.values['IGNORED'].value] } }
- it { is_expected.to contain_exactly(alert_2) }
+ it { is_expected.to contain_exactly(ignored_alert) }
end
describe 'sorting' do
@@ -45,11 +45,11 @@ RSpec.describe Resolvers::AlertManagement::AlertResolver do
let_it_be(:alert_count_3) { create(:alert_management_alert, project: project, events: 3) }
it 'sorts alerts ascending' do
- expect(resolve_alerts(sort: :event_count_asc)).to eq [alert_2, alert_1, alert_count_3, alert_count_6]
+ expect(resolve_alerts(sort: :event_count_asc)).to eq [ignored_alert, resolved_alert, alert_count_3, alert_count_6]
end
it 'sorts alerts descending' do
- expect(resolve_alerts(sort: :event_count_desc)).to eq [alert_count_6, alert_count_3, alert_1, alert_2]
+ expect(resolve_alerts(sort: :event_count_desc)).to eq [alert_count_6, alert_count_3, resolved_alert, ignored_alert]
end
end
end
diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
new file mode 100644
index 00000000000..e23a37b3d69
--- /dev/null
+++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::BoardListIssuesResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauth_user) { create(:user) }
+  let_it_be(:user_project) { create(:project, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:group) { create(:group, :private) }
+
+ shared_examples_for 'group and project board list issues resolver' do
+ let!(:board) { create(:board, resource_parent: board_parent) }
+
+ before do
+ board_parent.add_developer(user)
+ end
+
+ # auth is handled by the parent object
+ context 'when authorized' do
+ let!(:list) { create(:list, board: board, label: label) }
+
+ it 'returns the issues in the correct order' do
+ issue1 = create(:issue, project: project, labels: [label], relative_position: 10)
+ issue2 = create(:issue, project: project, labels: [label], relative_position: 12)
+ issue3 = create(:issue, project: project, labels: [label], relative_position: 10)
+
+ # by relative_position and then ID
+ issues = resolve_board_list_issues.items
+
+ expect(issues.map(&:id)).to eq [issue3.id, issue1.id, issue2.id]
+ end
+ end
+ end
+
+ describe '#resolve' do
+ context 'when project boards' do
+ let(:board_parent) { user_project }
+ let!(:label) { create(:label, project: project, name: 'project label') }
+ let(:project) { user_project }
+
+ it_behaves_like 'group and project board list issues resolver'
+ end
+
+ context 'when group boards' do
+ let(:board_parent) { group }
+ let!(:label) { create(:group_label, group: group, name: 'group label') }
+ let!(:project) { create(:project, :private, group: group) }
+
+ it_behaves_like 'group and project board list issues resolver'
+ end
+ end
+
+ def resolve_board_list_issues(args: {}, current_user: user)
+ resolve(described_class, obj: list, args: args, ctx: { current_user: current_user })
+ end
+end
diff --git a/spec/graphql/resolvers/board_lists_resolver_spec.rb b/spec/graphql/resolvers/board_lists_resolver_spec.rb
index f662e9a0f62..fb6a5ccb781 100644
--- a/spec/graphql/resolvers/board_lists_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_lists_resolver_spec.rb
@@ -57,6 +57,30 @@ RSpec.describe Resolvers::BoardListsResolver do
expect(lists.count).to eq 3
end
end
+
+ context 'when querying for a single list' do
+ it 'returns specified list' do
+ list = resolve_board_lists(args: { id: global_id_of(label_list) }).items
+
+ expect(list).to eq [label_list]
+ end
+
+ it 'returns empty result if list is not found' do
+ external_group = create(:group, :private)
+      external_board = create(:board, resource_parent: external_group)
+ external_label = create(:group_label, group: group)
+ external_list = create(:list, board: external_board, label: external_label)
+
+ list = resolve_board_lists(args: { id: global_id_of(external_list) }).items
+
+ expect(list).to eq List.none
+ end
+
+ it 'raises an argument error if list ID is not valid' do
+ expect { resolve_board_lists(args: { id: 'test' }).items }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+ end
end
end
diff --git a/spec/graphql/resolvers/ci_configuration/sast_resolver_spec.rb b/spec/graphql/resolvers/ci_configuration/sast_resolver_spec.rb
deleted file mode 100644
index de69ad5d450..00000000000
--- a/spec/graphql/resolvers/ci_configuration/sast_resolver_spec.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Resolvers::CiConfiguration::SastResolver do
- include GraphqlHelpers
-
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
-
- describe '#resolve' do
- subject(:sast_config) { resolve(described_class, ctx: { current_user: user }, obj: project) }
-
- it 'returns global variable informations related to SAST' do
- expect(sast_config['global'].first['field']).to eql("SECURE_ANALYZERS_PREFIX")
- expect(sast_config['global'].first['label']).to eql("Image prefix")
- expect(sast_config['global'].first['type']).to eql("string")
-
- expect(sast_config['pipeline'].first['field']).to eql("stage")
- expect(sast_config['pipeline'].first['label']).to eql("Stage")
- expect(sast_config['pipeline'].first['type']).to eql("dropdown")
-
- expect(sast_config['analyzers'].first['name']).to eql("brakeman")
- expect(sast_config['analyzers'].first['label']).to eql("Brakeman")
- expect(sast_config['analyzers'].first['enabled']).to be true
- end
- end
-end
diff --git a/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb b/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb
index 20a0cb842a4..a408981c08e 100644
--- a/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/commit_pipelines_resolver_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Resolvers::CommitPipelinesResolver do
status: 'success'
)
end
+
let!(:pipeline2) do
create(
:ci_pipeline,
@@ -27,6 +28,7 @@ RSpec.describe Resolvers::CommitPipelinesResolver do
status: 'failed'
)
end
+
let!(:pipeline3) do
create(
:ci_pipeline,
diff --git a/spec/graphql/resolvers/group_issues_resolver_spec.rb b/spec/graphql/resolvers/group_issues_resolver_spec.rb
new file mode 100644
index 00000000000..463cdca699b
--- /dev/null
+++ b/spec/graphql/resolvers/group_issues_resolver_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::GroupIssuesResolver do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:other_project) { create(:project, group: group) }
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subproject) { create(:project, group: subgroup) }
+
+ let_it_be(:issue1) { create(:incident, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago) }
+ let_it_be(:issue2) { create(:issue, project: project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago) }
+ let_it_be(:issue3) { create(:issue, project: other_project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago) }
+ let_it_be(:issue4) { create(:issue) }
+
+ let_it_be(:subissue1) { create(:issue, project: subproject) }
+ let_it_be(:subissue2) { create(:issue, project: subproject) }
+ let_it_be(:subissue3) { create(:issue, project: subproject) }
+
+ before_all do
+ group.add_developer(current_user)
+ subgroup.add_developer(current_user)
+ end
+
+ describe '#resolve' do
+ it 'finds all group issues' do
+ result = resolve(described_class, obj: group, ctx: { current_user: current_user })
+
+ expect(result).to contain_exactly(issue1, issue2, issue3)
+ end
+
+ it 'finds all group and subgroup issues' do
+ result = resolve(described_class, obj: group, args: { include_subgroups: true }, ctx: { current_user: current_user })
+
+ expect(result).to contain_exactly(issue1, issue2, issue3, subissue1, subissue2, subissue3)
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
new file mode 100644
index 00000000000..05d0ec38192
--- /dev/null
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::GroupMilestonesResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:current_user) { create(:user) }
+
+ def resolve_group_milestones(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: group, args: args, ctx: context)
+ end
+
+ let_it_be(:now) { Time.now }
+ let_it_be(:group) { create(:group, :private) }
+
+ before_all do
+ group.add_developer(current_user)
+ end
+
+ it 'calls MilestonesFinder#execute' do
+ expect_next_instance_of(MilestonesFinder) do |finder|
+ expect(finder).to receive(:execute)
+ end
+
+ resolve_group_milestones
+ end
+
+ context 'without parameters' do
+ it 'calls MilestonesFinder to retrieve all milestones' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: nil, group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_group_milestones
+ end
+ end
+
+ context 'with parameters' do
+ it 'calls MilestonesFinder with correct parameters' do
+ start_date = now
+ end_date = start_date + 1.hour
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: nil, group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date)
+ .and_call_original
+
+ resolve_group_milestones(start_date: start_date, end_date: end_date, state: 'closed')
+ end
+ end
+
+ context 'by ids' do
+ it 'calls MilestonesFinder with correct parameters' do
+ milestone = create(:milestone, group: group)
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: [milestone.id.to_s], group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_group_milestones(ids: [milestone.to_global_id])
+ end
+ end
+
+ context 'by timeframe' do
+ context 'when start_date and end_date are present' do
+ context 'when start date is after end_date' do
+ it 'raises error' do
+ expect do
+ resolve_group_milestones(start_date: now, end_date: now - 2.days)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
+ end
+ end
+ end
+
+ context 'when only start_date is present' do
+ it 'raises error' do
+ expect do
+ resolve_group_milestones(start_date: now)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
+ end
+
+ context 'when only end_date is present' do
+ it 'raises error' do
+ expect do
+ resolve_group_milestones(end_date: now)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
+ end
+ end
+
+ context 'when user cannot read milestones' do
+ it 'raises error' do
+ unauthorized_user = create(:user)
+
+ expect do
+ resolve_group_milestones({}, { current_user: unauthorized_user })
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when including descendant milestones in a public group' do
+ let_it_be(:group) { create(:group, :public) }
+ let(:args) { { include_descendants: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ accessible_group = create(:group, :private, parent: group)
+ accessible_project = create(:project, group: accessible_group)
+ accessible_group.add_developer(current_user)
+ inaccessible_group = create(:group, :private, parent: group)
+ inaccessible_project = create(:project, :private, group: group)
+ milestone1 = create(:milestone, group: group)
+ milestone2 = create(:milestone, group: accessible_group)
+ milestone3 = create(:milestone, project: accessible_project)
+ create(:milestone, group: inaccessible_group)
+ create(:milestone, project: inaccessible_project)
+
+ expect(resolve_group_milestones(args)).to match_array([milestone1, milestone2, milestone3])
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
new file mode 100644
index 00000000000..d2412db35c6
--- /dev/null
+++ b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::IssueStatusCountsResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago) }
+ let_it_be(:incident) { create(:incident, project: project, state: :closed, created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago) }
+
+ let(:args) { {} }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ subject { resolve_issue_status_counts(args) }
+
+ it { is_expected.to be_a(Gitlab::IssuablesCountForState) }
+ specify { expect(subject.project).to eq(project) }
+
+ it 'returns expected results' do
+ result = resolve_issue_status_counts
+
+ expect(result.all).to eq 2
+ expect(result.opened).to eq 1
+ expect(result.closed).to eq 1
+ end
+
+ it 'filters by search', :aggregate_failures do
+ result = resolve_issue_status_counts(search: issue.title)
+
+ expect(result.all).to eq 1
+ expect(result.opened).to eq 1
+ expect(result.closed).to eq 0
+ end
+
+ it 'filters by issue type', :aggregate_failures do
+ result = resolve_issue_status_counts(issue_types: ['incident'])
+
+ expect(result.all).to eq 1
+ expect(result.opened).to eq 0
+ expect(result.closed).to eq 1
+ end
+
+ # The state param is ignored in IssuableFinder#count_by_state
+ it 'ignores state filter', :aggregate_failures do
+ result = resolve_issue_status_counts(state: 'closed')
+
+ expect(result.all).to eq 2
+ expect(result.opened).to eq 1
+ expect(result.closed).to eq 1
+ end
+
+ private
+
+ def resolve_issue_status_counts(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: project, args: args, ctx: context)
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index eb17e94a450..db5d009f0e7 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Resolvers::IssuesResolver do
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:assignee) { create(:user) }
- let_it_be(:issue1) { create(:issue, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago, milestone: milestone) }
+ let_it_be(:issue1) { create(:incident, project: project, state: :opened, created_at: 3.hours.ago, updated_at: 3.hours.ago, milestone: milestone) }
let_it_be(:issue2) { create(:issue, project: project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago, assignees: [assignee]) }
let_it_be(:issue3) { create(:issue, project: other_project, state: :closed, title: 'foo', created_at: 1.hour.ago, updated_at: 1.hour.ago, closed_at: 1.hour.ago, assignees: [assignee]) }
let_it_be(:issue4) { create(:issue) }
@@ -95,6 +95,20 @@ RSpec.describe Resolvers::IssuesResolver do
end
end
+ describe 'filters by issue_type' do
+ it 'filters by a single type' do
+ expect(resolve_issues(issue_types: ['incident'])).to contain_exactly(issue1)
+ end
+
+ it 'filters by more than one type' do
+ expect(resolve_issues(issue_types: %w(incident issue))).to contain_exactly(issue1, issue2)
+ end
+
+ it 'ignores the filter if none given' do
+ expect(resolve_issues(issue_types: [])).to contain_exactly(issue1, issue2)
+ end
+ end
+
context 'when searching issues' do
it 'returns correct issues' do
expect(resolve_issues(search: 'foo')).to contain_exactly(issue2)
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index 0a8fd82613a..e939edae779 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -161,6 +161,24 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
+ context 'by merged_after and merged_before' do
+ before do
+ merge_request_1.metrics.update!(merged_at: 10.days.ago)
+ end
+
+ it 'returns merge requests merged within the given period' do
+ result = resolve_mr(project, merged_after: 20.days.ago, merged_before: 5.days.ago)
+
+ expect(result).to eq([merge_request_1])
+ end
+
+ it 'does not return merge requests merged before the given merged_after date' do
+ result = resolve_mr(project, merged_after: 2.days.ago)
+
+ expect(result).to be_empty
+ end
+ end
+
describe 'combinations' do
it 'requires all filters' do
create(:merge_request, :closed, source_project: project, target_project: project, source_branch: merge_request_4.source_branch)
diff --git a/spec/graphql/resolvers/milestone_resolver_spec.rb b/spec/graphql/resolvers/milestone_resolver_spec.rb
deleted file mode 100644
index 36dd5ef03e2..00000000000
--- a/spec/graphql/resolvers/milestone_resolver_spec.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Resolvers::MilestoneResolver do
- include GraphqlHelpers
-
- describe '#resolve' do
- let_it_be(:current_user) { create(:user) }
-
- def resolve_group_milestones(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: group, args: args, ctx: context)
- end
-
- context 'for group milestones' do
- let_it_be(:now) { Time.now }
- let_it_be(:group) { create(:group, :private) }
-
- before do
- group.add_developer(current_user)
- end
-
- it 'calls MilestonesFinder#execute' do
- expect_next_instance_of(MilestonesFinder) do |finder|
- expect(finder).to receive(:execute)
- end
-
- resolve_group_milestones
- end
-
- context 'without parameters' do
- it 'calls MilestonesFinder to retrieve all milestones' do
- expect(MilestonesFinder).to receive(:new)
- .with(group_ids: group.id, state: 'all', start_date: nil, end_date: nil)
- .and_call_original
-
- resolve_group_milestones
- end
- end
-
- context 'with parameters' do
- it 'calls MilestonesFinder with correct parameters' do
- start_date = now
- end_date = start_date + 1.hour
-
- expect(MilestonesFinder).to receive(:new)
- .with(group_ids: group.id, state: 'closed', start_date: start_date, end_date: end_date)
- .and_call_original
-
- resolve_group_milestones(start_date: start_date, end_date: end_date, state: 'closed')
- end
- end
-
- context 'by timeframe' do
- context 'when start_date and end_date are present' do
- context 'when start date is after end_date' do
- it 'raises error' do
- expect do
- resolve_group_milestones(start_date: now, end_date: now - 2.days)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
- end
- end
- end
-
- context 'when only start_date is present' do
- it 'raises error' do
- expect do
- resolve_group_milestones(start_date: now)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
- end
- end
-
- context 'when only end_date is present' do
- it 'raises error' do
- expect do
- resolve_group_milestones(end_date: now)
- end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
- end
- end
- end
-
- context 'when user cannot read milestones' do
- it 'raises error' do
- unauthorized_user = create(:user)
-
- expect do
- resolve_group_milestones({}, { current_user: unauthorized_user })
- end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
- end
-
- context 'when including descendant milestones in a public group' do
- let_it_be(:group) { create(:group, :public) }
- let(:args) { { include_descendants: true } }
-
- it 'finds milestones only in accessible projects and groups' do
- accessible_group = create(:group, :private, parent: group)
- accessible_project = create(:project, group: accessible_group)
- accessible_group.add_developer(current_user)
- inaccessible_group = create(:group, :private, parent: group)
- inaccessible_project = create(:project, :private, group: group)
- milestone1 = create(:milestone, group: group)
- milestone2 = create(:milestone, group: accessible_group)
- milestone3 = create(:milestone, project: accessible_project)
- create(:milestone, group: inaccessible_group)
- create(:milestone, project: inaccessible_project)
-
- expect(resolve_group_milestones(args)).to match_array([milestone1, milestone2, milestone3])
- end
- end
- end
-end
diff --git a/spec/graphql/resolvers/project_milestones_resolver_spec.rb b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
new file mode 100644
index 00000000000..e0b250cfe7c
--- /dev/null
+++ b/spec/graphql/resolvers/project_milestones_resolver_spec.rb
@@ -0,0 +1,117 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::ProjectMilestonesResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:current_user) { create(:user) }
+
+ before_all do
+ project.add_developer(current_user)
+ end
+
+ def resolve_project_milestones(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: project, args: args, ctx: context)
+ end
+
+ it 'calls MilestonesFinder to retrieve all milestones' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: nil, project_ids: project.id, state: 'all', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_project_milestones
+ end
+
+ context 'when including ancestor milestones' do
+ let(:parent_group) { create(:group) }
+ let(:group) { create(:group, parent: parent_group) }
+ let(:project) { create(:project, group: group) }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'calls MilestonesFinder with correct parameters' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: nil, project_ids: project.id, group_ids: contain_exactly(group, parent_group), state: 'all', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_project_milestones(include_ancestors: true)
+ end
+ end
+
+ context 'by ids' do
+ it 'calls MilestonesFinder with correct parameters' do
+ milestone = create(:milestone, project: project)
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: [milestone.id.to_s], project_ids: project.id, state: 'all', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_project_milestones(ids: [milestone.to_global_id])
+ end
+ end
+
+ context 'by state' do
+ it 'calls MilestonesFinder with correct parameters' do
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: nil, project_ids: project.id, state: 'closed', start_date: nil, end_date: nil)
+ .and_call_original
+
+ resolve_project_milestones(state: 'closed')
+ end
+ end
+
+ context 'by timeframe' do
+ context 'when start_date and end_date are present' do
+ it 'calls MilestonesFinder with correct parameters' do
+ start_date = Time.now
+ end_date = Time.now + 5.days
+
+ expect(MilestonesFinder).to receive(:new)
+ .with(ids: nil, project_ids: project.id, state: 'all', start_date: start_date, end_date: end_date)
+ .and_call_original
+
+ resolve_project_milestones(start_date: start_date, end_date: end_date)
+ end
+
+ context 'when start date is after end_date' do
+ it 'raises error' do
+ expect do
+ resolve_project_milestones(start_date: Time.now, end_date: Time.now - 2.days)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "startDate is after endDate")
+ end
+ end
+ end
+
+ context 'when only start_date is present' do
+ it 'raises error' do
+ expect do
+ resolve_project_milestones(start_date: Time.now)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
+ end
+
+ context 'when only end_date is present' do
+ it 'raises error' do
+ expect do
+ resolve_project_milestones(end_date: Time.now)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, /Both startDate and endDate/)
+ end
+ end
+ end
+
+ context 'when user cannot read milestones' do
+ it 'raises error' do
+ unauthorized_user = create(:user)
+
+ expect do
+ resolve_project_milestones({}, { current_user: unauthorized_user })
+ end.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
index a659b3bdb6e..fada2f9193c 100644
--- a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
@@ -33,4 +33,21 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
it 'errors when no iid is passed' do
expect { resolve_pipeline(project, {}) }.to raise_error(ArgumentError)
end
+
+ context 'when the pipeline is not a ci_config_source' do
+ let(:pipeline) do
+ config_source_value = Ci::PipelineEnums.non_ci_config_source_values.first
+ config_source = Ci::PipelineEnums.config_sources.key(config_source_value)
+
+ create(:ci_pipeline, config_source: config_source, project: project)
+ end
+
+ it 'resolves pipeline for the passed iid' do
+ result = batch_sync do
+ resolve_pipeline(project, { iid: pipeline.iid.to_s })
+ end
+
+ expect(result).to eq(pipeline)
+ end
+ end
end
diff --git a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
index 4038bcb3e5d..840aea8b8c4 100644
--- a/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_projects_resolver_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
end
end
- context 'when project has no jira service' do
+ context 'when project has no Jira service' do
let_it_be(:jira_service) { nil }
context 'when user is a maintainer' do
@@ -29,7 +29,7 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
end
end
- context 'when project has jira service' do
+ context 'when project has Jira service' do
let(:jira_service) { create(:jira_service, project: project) }
context 'when user is a developer' do
@@ -46,10 +46,11 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
end
context 'when Jira connection is valid' do
- include_context 'jira projects request context'
+ include_context 'Jira projects request context'
- it 'returns jira projects' do
- jira_projects = resolve_jira_projects
+ it 'returns Jira projects', :aggregate_failures do
+ resolver = resolve_jira_projects
+ jira_projects = resolver.items
project_keys = jira_projects.map(&:key)
project_names = jira_projects.map(&:name)
project_ids = jira_projects.map(&:id)
@@ -58,6 +59,23 @@ RSpec.describe Resolvers::Projects::JiraProjectsResolver do
expect(project_keys).to eq(%w(EX ABC))
expect(project_names).to eq(%w(Example Alphabetical))
expect(project_ids).to eq(%w(10000 10001))
+ expect(resolver.max_page_size).to eq(2)
+ end
+
+ context 'when filtering projects by name' do
+ it 'returns Jira projects', :aggregate_failures do
+ resolver = resolve_jira_projects({ name: 'ABC' })
+ jira_projects = resolver.items
+ project_keys = jira_projects.map(&:key)
+ project_names = jira_projects.map(&:name)
+ project_ids = jira_projects.map(&:id)
+
+ expect(jira_projects.size).to eq 1
+ expect(project_keys).to eq(%w(ABC))
+ expect(project_names).to eq(%w(Alphabetical))
+ expect(project_ids).to eq(%w(10001))
+ expect(resolver.max_page_size).to eq(1)
+ end
end
end
diff --git a/spec/graphql/resolvers/todo_resolver_spec.rb b/spec/graphql/resolvers/todo_resolver_spec.rb
index 0775cb8dae7..83e3140b676 100644
--- a/spec/graphql/resolvers/todo_resolver_spec.rb
+++ b/spec/graphql/resolvers/todo_resolver_spec.rb
@@ -99,7 +99,7 @@ RSpec.describe Resolvers::TodoResolver do
end
end
- context 'when no user is provided' do
+ context 'when no target is provided' do
it 'returns no todos' do
todos = resolve(described_class, obj: nil, args: {}, ctx: { current_user: current_user })
@@ -107,7 +107,7 @@ RSpec.describe Resolvers::TodoResolver do
end
end
- context 'when provided user is not current user' do
+ context 'when target user is not the current user' do
it 'returns no todos' do
other_user = create(:user)
@@ -116,6 +116,16 @@ RSpec.describe Resolvers::TodoResolver do
expect(todos).to be_empty
end
end
+
+ context 'when request is for a todo target' do
+ it 'returns only the todos for the target' do
+ target = issue_todo_pending.target
+
+ todos = resolve(described_class, obj: target, args: {}, ctx: { current_user: current_user })
+
+ expect(todos).to contain_exactly(issue_todo_pending)
+ end
+ end
end
def resolve_todos(args = {}, context = { current_user: current_user })
diff --git a/spec/graphql/types/alert_management/alert_type_spec.rb b/spec/graphql/types/alert_management/alert_type_spec.rb
index 45ac673986d..e14c189d4b6 100644
--- a/spec/graphql/types/alert_management/alert_type_spec.rb
+++ b/spec/graphql/types/alert_management/alert_type_spec.rb
@@ -28,6 +28,10 @@ RSpec.describe GitlabSchema.types['AlertManagementAlert'] do
notes
discussions
metrics_dashboard_url
+ runbook
+ todos
+ details_url
+ prometheus_alert
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/board_list_type_spec.rb b/spec/graphql/types/board_list_type_spec.rb
index 046d1e92bfa..7976936fc1f 100644
--- a/spec/graphql/types/board_list_type_spec.rb
+++ b/spec/graphql/types/board_list_type_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe GitlabSchema.types['BoardList'] do
specify { expect(described_class.graphql_name).to eq('BoardList') }
it 'has specific fields' do
- expected_fields = %w[id list_type position label]
+ expected_fields = %w[id list_type position label issues_count issues]
expect(described_class).to include_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/ci/group_type_spec.rb b/spec/graphql/types/ci/group_type_spec.rb
new file mode 100644
index 00000000000..8d547b19af3
--- /dev/null
+++ b/spec/graphql/types/ci/group_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::GroupType do
+ specify { expect(described_class.graphql_name).to eq('CiGroup') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ size
+ jobs
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
new file mode 100644
index 00000000000..faf3a95cf25
--- /dev/null
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::JobType do
+ specify { expect(described_class.graphql_name).to eq('CiJob') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ needs
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/stage_type_spec.rb b/spec/graphql/types/ci/stage_type_spec.rb
new file mode 100644
index 00000000000..0c352ed27aa
--- /dev/null
+++ b/spec/graphql/types/ci/stage_type_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::StageType do
+ specify { expect(described_class.graphql_name).to eq('CiStage') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ name
+ groups
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb b/spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb
deleted file mode 100644
index 34a22feeaf5..00000000000
--- a/spec/graphql/types/ci_configuration/sast/analyzers_entity_type_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabSchema.types['SastCiConfigurationAnalyzersEntity'] do
- let(:fields) { %i[name label enabled description] }
-
- it { expect(described_class.graphql_name).to eq('SastCiConfigurationAnalyzersEntity') }
-
- it { expect(described_class).to have_graphql_fields(fields) }
-end
diff --git a/spec/graphql/types/ci_configuration/sast/entity_type_spec.rb b/spec/graphql/types/ci_configuration/sast/entity_type_spec.rb
deleted file mode 100644
index 7c6ad013d4a..00000000000
--- a/spec/graphql/types/ci_configuration/sast/entity_type_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabSchema.types['SastCiConfigurationEntity'] do
- let(:fields) { %i[field label description type options default_value value] }
-
- it { expect(described_class.graphql_name).to eq('SastCiConfigurationEntity') }
-
- it { expect(described_class).to have_graphql_fields(fields) }
-end
diff --git a/spec/graphql/types/ci_configuration/sast/options_entity_spec.rb b/spec/graphql/types/ci_configuration/sast/options_entity_spec.rb
deleted file mode 100644
index c60c8b9c84a..00000000000
--- a/spec/graphql/types/ci_configuration/sast/options_entity_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabSchema.types['SastCiConfigurationOptionsEntity'] do
- let(:fields) { %i[label value] }
-
- it { expect(described_class.graphql_name).to eq('SastCiConfigurationOptionsEntity') }
-
- it { expect(described_class).to have_graphql_fields(fields) }
-end
diff --git a/spec/graphql/types/ci_configuration/sast/type_spec.rb b/spec/graphql/types/ci_configuration/sast/type_spec.rb
deleted file mode 100644
index e7a8cd436e4..00000000000
--- a/spec/graphql/types/ci_configuration/sast/type_spec.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabSchema.types['SastCiConfiguration'] do
- let(:fields) { %i[global pipeline analyzers] }
-
- it { expect(described_class.graphql_name).to eq('SastCiConfiguration') }
-
- it { expect(described_class).to have_graphql_fields(fields) }
-end
diff --git a/spec/graphql/types/commit_type_spec.rb b/spec/graphql/types/commit_type_spec.rb
index 75984786972..d222287270d 100644
--- a/spec/graphql/types/commit_type_spec.rb
+++ b/spec/graphql/types/commit_type_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe GitlabSchema.types['Commit'] do
it 'contains attributes related to commit' do
expect(described_class).to have_graphql_fields(
- :id, :sha, :title, :description, :message, :title_html, :authored_date,
- :author_name, :author_gravatar, :author, :web_url, :latest_pipeline,
+ :id, :sha, :title, :description, :description_html, :message, :title_html, :authored_date,
+ :author_name, :author_gravatar, :author, :web_url, :web_path, :latest_pipeline,
:pipelines, :signature_html
)
end
diff --git a/spec/graphql/types/issue_connection_type_spec.rb b/spec/graphql/types/countable_connection_type_spec.rb
index af34611ecfe..af34611ecfe 100644
--- a/spec/graphql/types/issue_connection_type_spec.rb
+++ b/spec/graphql/types/countable_connection_type_spec.rb
diff --git a/spec/graphql/types/design_management/design_at_version_type_spec.rb b/spec/graphql/types/design_management/design_at_version_type_spec.rb
index 5a6292c924a..4d61ecf62cc 100644
--- a/spec/graphql/types/design_management/design_at_version_type_spec.rb
+++ b/spec/graphql/types/design_management/design_at_version_type_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe GitlabSchema.types['DesignAtVersion'] do
version = design.versions.first
GitlabSchema.id_from_object(create(:design_at_version, design: design, version: version))
end
+
let_it_be(:object_id_b) { GitlabSchema.id_from_object(create(:design_at_version)) }
let(:object_type) { ::Types::DesignManagement::DesignAtVersionType }
end
diff --git a/spec/graphql/types/environment_type_spec.rb b/spec/graphql/types/environment_type_spec.rb
index f7522cb3e2c..abeeeba543f 100644
--- a/spec/graphql/types/environment_type_spec.rb
+++ b/spec/graphql/types/environment_type_spec.rb
@@ -7,11 +7,76 @@ RSpec.describe GitlabSchema.types['Environment'] do
it 'has the expected fields' do
expected_fields = %w[
- name id state metrics_dashboard
+ name id state metrics_dashboard latest_opened_most_severe_alert
]
expect(described_class).to have_graphql_fields(*expected_fields)
end
specify { expect(described_class).to require_graphql_authorizations(:read_environment) }
+
+ context 'when there is an environment' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+
+ subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ environment(name: "#{environment.name}") {
+ name
+ state
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ project.add_developer(user)
+ end
+
+ it 'returns an environment' do
+ expect(subject['data']['project']['environment']['name']).to eq(environment.name)
+ end
+
+ context 'when querying alert data for the environment' do
+ let_it_be(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ environment(name: "#{environment.name}") {
+ name
+ state
+ latestOpenedMostSevereAlert {
+ severity
+ title
+ detailsUrl
+ prometheusAlert {
+ humanizedText
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'does not return alert information' do
+ expect(subject['data']['project']['environment']['latestOpenedMostSevereAlert']).to be_nil
+ end
+
+ context 'when an alert is raised on the environment' do
+ let!(:prometheus_alert) { create(:prometheus_alert, project: project, environment: environment) }
+ let!(:alert) { create(:alert_management_alert, :triggered, :prometheus, project: project, environment: environment, prometheus_alert: prometheus_alert) }
+
+ it 'returns alert information' do
+ expect(subject['data']['project']['environment']['latestOpenedMostSevereAlert']['severity']).to eq(alert.severity.upcase)
+ end
+ end
+ end
+ end
end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index fb79e9bb85b..0b87805c2ef 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe GitlabSchema.types['Group'] do
web_url avatar_url share_with_group_lock project_creation_level
subgroup_creation_level require_two_factor_authentication
two_factor_grace_period auto_devops_enabled emails_disabled
- mentions_disabled parent boards
+ mentions_disabled parent boards milestones
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/issue_status_count_type_spec.rb b/spec/graphql/types/issue_status_count_type_spec.rb
new file mode 100644
index 00000000000..4e273d6415a
--- /dev/null
+++ b/spec/graphql/types/issue_status_count_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['IssueStatusCountsType'] do
+ specify { expect(described_class.graphql_name).to eq('IssueStatusCountsType') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ all
+ opened
+ closed
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/issue_type_enum_spec.rb b/spec/graphql/types/issue_type_enum_spec.rb
new file mode 100644
index 00000000000..7ae5eb76f28
--- /dev/null
+++ b/spec/graphql/types/issue_type_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::IssueTypeEnum do
+ specify { expect(described_class.graphql_name).to eq('IssueType') }
+
+ it 'exposes all the existing issue type values' do
+ expect(described_class.values.keys).to include(
+ *%w[ISSUE INCIDENT]
+ )
+ end
+end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 4a86b07ab1c..24353f8fe3a 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -132,14 +132,14 @@ RSpec.describe GitlabSchema.types['Issue'] do
let(:query) do
%(
query {
- project(fullPath:"#{project.full_path}"){
- issue(iid:"#{issue.iid}"){
+ project(fullPath: "#{project.full_path}") {
+ issue(iid: "#{issue.iid}") {
descriptionHtml
- notes{
- edges{
- node{
+ notes {
+ edges {
+ node {
bodyHtml
- author{
+ author {
username
}
body
diff --git a/spec/graphql/types/merge_request_type_spec.rb b/spec/graphql/types/merge_request_type_spec.rb
index b3dccde8ce3..b11951190e0 100644
--- a/spec/graphql/types/merge_request_type_spec.rb
+++ b/spec/graphql/types/merge_request_type_spec.rb
@@ -24,9 +24,11 @@ RSpec.describe GitlabSchema.types['MergeRequest'] do
source_branch_exists target_branch_exists
upvotes downvotes head_pipeline pipelines task_completion_status
milestone assignees participants subscribed labels discussion_locked time_estimate
- total_time_spent reference author merged_at
+ total_time_spent reference author merged_at commit_count
]
+ expected_fields << 'approved_by' if Gitlab.ee?
+
expect(described_class).to have_graphql_fields(*expected_fields)
end
end
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index ea88ed6a3f5..8a5d0cdf12d 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -22,11 +22,12 @@ RSpec.describe GitlabSchema.types['Project'] do
only_allow_merge_if_pipeline_succeeds request_access_enabled
only_allow_merge_if_all_discussions_are_resolved printing_merge_request_link_enabled
namespace group statistics repository merge_requests merge_request issues
- issue pipelines removeSourceBranchAfterMerge sentryDetailedError snippets
+ issue milestones pipelines removeSourceBranchAfterMerge sentryDetailedError snippets
grafanaIntegration autocloseReferencedIssues suggestion_commit_message environments
- boards jira_import_status jira_imports services releases release
+ environment boards jira_import_status jira_imports services releases release
alert_management_alerts alert_management_alert alert_management_alert_status_counts
- container_expiration_policy sast_ci_configuration service_desk_enabled service_desk_address
+ container_expiration_policy service_desk_enabled service_desk_address
+ issue_status_counts
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -69,7 +70,9 @@ RSpec.describe GitlabSchema.types['Project'] do
:before,
:after,
:first,
- :last
+ :last,
+ :merged_after,
+ :merged_before
)
end
end
@@ -95,6 +98,13 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_resolver(Resolvers::EnvironmentsResolver) }
end
+ describe 'environment field' do
+ subject { described_class.fields['environment'] }
+
+ it { is_expected.to have_graphql_type(Types::EnvironmentType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::EnvironmentsResolver.single) }
+ end
+
describe 'members field' do
subject { described_class.fields['projectMembers'] }
@@ -140,93 +150,5 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::ContainerExpirationPolicyType) }
end
- describe 'sast_ci_configuration' do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
- let_it_be(:query) do
- %(
- query {
- project(fullPath: "#{project.full_path}") {
- sastCiConfiguration {
- global {
- nodes {
- type
- options {
- nodes {
- label
- value
- }
- }
- field
- label
- defaultValue
- value
- }
- }
- pipeline {
- nodes {
- type
- options {
- nodes {
- label
- value
- }
- }
- field
- label
- defaultValue
- value
- }
- }
- analyzers {
- nodes {
- name
- label
- enabled
- }
- }
- }
- }
- }
- )
- end
-
- subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
-
- before do
- project.add_developer(user)
- end
-
- it "returns the project's sast configuration for global variables" do
- query_result = subject.dig('data', 'project', 'sastCiConfiguration', 'global', 'nodes')
- first_config = query_result.first
- fourth_config = query_result[3]
- expect(first_config['type']).to eq('string')
- expect(first_config['field']).to eq('SECURE_ANALYZERS_PREFIX')
- expect(first_config['label']).to eq('Image prefix')
- expect(first_config['defaultValue']).to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
- expect(first_config['value']).to eq('')
- expect(first_config['options']).to be_nil
- expect(fourth_config['options']['nodes']).to match([{ "value" => "true", "label" => "true (disables SAST)" },
- { "value" => "false", "label" => "false (enables SAST)" }])
- end
-
- it "returns the project's sast configuration for pipeline variables" do
- configuration = subject.dig('data', 'project', 'sastCiConfiguration', 'pipeline', 'nodes').first
- expect(configuration['type']).to eq('dropdown')
- expect(configuration['field']).to eq('stage')
- expect(configuration['label']).to eq('Stage')
- expect(configuration['defaultValue']).to eq('test')
- expect(configuration['value']).to eq('')
- end
-
- it "returns the project's sast configuration for analyzer variables" do
- configuration = subject.dig('data', 'project', 'sastCiConfiguration', 'analyzers', 'nodes').first
- expect(configuration['name']).to eq('brakeman')
- expect(configuration['label']).to eq('Brakeman')
- expect(configuration['enabled']).to eq(true)
- end
- end
-
it_behaves_like 'a GraphQL type with labels'
end
diff --git a/spec/graphql/types/prometheus_alert_type_spec.rb b/spec/graphql/types/prometheus_alert_type_spec.rb
new file mode 100644
index 00000000000..716537ea716
--- /dev/null
+++ b/spec/graphql/types/prometheus_alert_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PrometheusAlert'] do
+ specify { expect(described_class.graphql_name).to eq('PrometheusAlert') }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ id humanized_text
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_prometheus_alerts) }
+end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 081f99a8307..ab13162b406 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe GitlabSchema.types['Query'] do
current_user
snippets
design_management
+ milestone
user
users
]
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index 0341ca2c733..86af69f1294 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
}
)
end
+
let(:response) { subject.dig('data', 'snippets', 'nodes')[0] }
subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
@@ -97,6 +98,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
}
)
end
+
let(:response) { subject.dig('data', 'snippets', 'nodes')[0] }
subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
diff --git a/spec/graphql/types/snippets/file_input_action_enum_spec.rb b/spec/graphql/types/snippets/blob_action_enum_spec.rb
index ff9b706240b..9c641bd5446 100644
--- a/spec/graphql/types/snippets/file_input_action_enum_spec.rb
+++ b/spec/graphql/types/snippets/blob_action_enum_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe Types::Snippets::FileInputActionEnum do
- specify { expect(described_class.graphql_name).to eq('SnippetFileInputActionEnum') }
+RSpec.describe Types::Snippets::BlobActionEnum do
+ specify { expect(described_class.graphql_name).to eq('SnippetBlobActionEnum') }
it 'exposes all file input action types' do
expect(described_class.values.keys).to eq(%w[create update delete move])
diff --git a/spec/graphql/types/snippets/file_input_type_spec.rb b/spec/graphql/types/snippets/blob_action_input_type_spec.rb
index c7d4909b542..5d6bd81fb77 100644
--- a/spec/graphql/types/snippets/file_input_type_spec.rb
+++ b/spec/graphql/types/snippets/blob_action_input_type_spec.rb
@@ -2,14 +2,14 @@
require 'spec_helper'
-RSpec.describe Types::Snippets::FileInputType do
- specify { expect(described_class.graphql_name).to eq('SnippetFileInputType') }
+RSpec.describe Types::Snippets::BlobActionInputType do
+ specify { expect(described_class.graphql_name).to eq('SnippetBlobActionInputType') }
it 'has the correct arguments' do
expect(described_class.arguments.keys).to match_array(%w[filePath action previousPath content])
end
- it 'sets the type of action argument to FileInputActionEnum' do
- expect(described_class.arguments['action'].type.of_type).to eq(Types::Snippets::FileInputActionEnum)
+ it 'sets the type of action argument to BlobActionEnum' do
+ expect(described_class.arguments['action'].type.of_type).to eq(Types::Snippets::BlobActionEnum)
end
end
diff --git a/spec/graphql/types/snippets/blob_viewer_type_spec.rb b/spec/graphql/types/snippets/blob_viewer_type_spec.rb
index 8210eb9a95c..295df992c67 100644
--- a/spec/graphql/types/snippets/blob_viewer_type_spec.rb
+++ b/spec/graphql/types/snippets/blob_viewer_type_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe GitlabSchema.types['SnippetBlobViewer'] do
let(:query) do
%(
query {
- snippets(ids:"#{snippet.to_global_id}"){
+ snippets(ids: "#{snippet.to_global_id}") {
edges {
node {
blob {
@@ -70,7 +70,7 @@ RSpec.describe GitlabSchema.types['SnippetBlobViewer'] do
let(:query) do
%(
query {
- snippets(ids:"#{snippet.to_global_id}"){
+ snippets(ids: "#{snippet.to_global_id}") {
edges {
node {
blob {
diff --git a/spec/graphql/types/time_type_spec.rb b/spec/graphql/types/time_type_spec.rb
index 68d346766c2..3b0d257e1d7 100644
--- a/spec/graphql/types/time_type_spec.rb
+++ b/spec/graphql/types/time_type_spec.rb
@@ -15,4 +15,14 @@ RSpec.describe GitlabSchema.types['Time'] do
it 'coerces an ISO-time into Time object' do
expect(described_class.coerce_isolated_input(iso)).to eq(time)
end
+
+ it 'rejects invalid input' do
+ expect { described_class.coerce_isolated_input('not valid') }
+ .to raise_error(GraphQL::CoercionError)
+ end
+
+ it 'rejects nil' do
+ expect { described_class.coerce_isolated_input(nil) }
+ .to raise_error(GraphQL::CoercionError)
+ end
end
diff --git a/spec/graphql/types/tree/blob_type_spec.rb b/spec/graphql/types/tree/blob_type_spec.rb
index 73d61d4860c..2b08b528e38 100644
--- a/spec/graphql/types/tree/blob_type_spec.rb
+++ b/spec/graphql/types/tree/blob_type_spec.rb
@@ -5,5 +5,5 @@ require 'spec_helper'
RSpec.describe Types::Tree::BlobType do
specify { expect(described_class.graphql_name).to eq('Blob') }
- specify { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url, :lfs_oid, :mode) }
+ specify { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url, :web_path, :lfs_oid, :mode) }
end
diff --git a/spec/graphql/types/tree/tree_entry_type_spec.rb b/spec/graphql/types/tree/tree_entry_type_spec.rb
index 0e5caf66854..82e05b299fc 100644
--- a/spec/graphql/types/tree/tree_entry_type_spec.rb
+++ b/spec/graphql/types/tree/tree_entry_type_spec.rb
@@ -5,5 +5,5 @@ require 'spec_helper'
RSpec.describe Types::Tree::TreeEntryType do
specify { expect(described_class.graphql_name).to eq('TreeEntry') }
- specify { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url) }
+ specify { expect(described_class).to have_graphql_fields(:id, :sha, :name, :type, :path, :flat_path, :web_url, :web_path) }
end
diff --git a/spec/graphql/types/user_status_type_spec.rb b/spec/graphql/types/user_status_type_spec.rb
new file mode 100644
index 00000000000..c4421a9cc10
--- /dev/null
+++ b/spec/graphql/types/user_status_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::UserStatusType do
+ specify { expect(described_class.graphql_name).to eq('UserStatus') }
+
+ it 'exposes the expected fields' do
+ expected_fields = %i[
+ emoji
+ message
+ message_html
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index 6cc3f7bcaa1..7710b8efefe 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -14,10 +14,13 @@ RSpec.describe GitlabSchema.types['User'] do
snippets
name
username
+ email
avatarUrl
webUrl
+ webPath
todos
state
+ status
authoredMergeRequests
assignedMergeRequests
groupMemberships
diff --git a/spec/helpers/appearances_helper_spec.rb b/spec/helpers/appearances_helper_spec.rb
index 179c69b2a67..d972ac27119 100644
--- a/spec/helpers/appearances_helper_spec.rb
+++ b/spec/helpers/appearances_helper_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe AppearancesHelper do
context 'when there is a logo but no associated upload' do
before do
# Legacy attachments were not tracked in the uploads table
- appearance.logo.upload.destroy
+ appearance.logo.upload.destroy!
appearance.reload
end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 08107b841d7..ce4e73bdc55 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -168,6 +168,42 @@ RSpec.describe ApplicationHelper do
it { expect(helper.active_when(false)).to eq(nil) }
end
+ describe '#promo_host' do
+ subject { helper.promo_host }
+
+ it 'returns the url' do
+ is_expected.to eq('about.gitlab.com')
+ end
+ end
+
+ describe '#promo_url' do
+ subject { helper.promo_url }
+
+ it 'returns the url' do
+ is_expected.to eq('https://about.gitlab.com')
+ end
+
+ it 'changes if promo_host changes' do
+ allow(helper).to receive(:promo_host).and_return('foobar.baz')
+
+ is_expected.to eq('https://foobar.baz')
+ end
+ end
+
+ describe '#contact_sales_url' do
+ subject { helper.contact_sales_url }
+
+ it 'returns the url' do
+ is_expected.to eq('https://about.gitlab.com/sales')
+ end
+
+ it 'changes if promo_url changes' do
+ allow(helper).to receive(:promo_url).and_return('https://somewhere.else')
+
+ is_expected.to eq('https://somewhere.else/sales')
+ end
+ end
+
describe '#support_url' do
context 'when alternate support url is specified' do
let(:alternate_url) { 'http://company.example.com/getting-help' }
diff --git a/spec/helpers/auto_devops_helper_spec.rb b/spec/helpers/auto_devops_helper_spec.rb
index ad705dc5a7b..4f060a0ae3b 100644
--- a/spec/helpers/auto_devops_helper_spec.rb
+++ b/spec/helpers/auto_devops_helper_spec.rb
@@ -128,7 +128,7 @@ RSpec.describe AutoDevopsHelper do
context 'with groups' do
before do
- receiver.update(parent: parent)
+ receiver.update!(parent: parent)
end
context 'when auto devops is enabled on parent' do
diff --git a/spec/helpers/award_emoji_helper_spec.rb b/spec/helpers/award_emoji_helper_spec.rb
index 51e0a1b9721..74ebdad3e8f 100644
--- a/spec/helpers/award_emoji_helper_spec.rb
+++ b/spec/helpers/award_emoji_helper_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe AwardEmojiHelper do
subject { helper.toggle_award_url(note) }
it 'returns correct url' do
- expected_url = "/snippets/#{note.noteable.id}/notes/#{note.id}/toggle_award_emoji"
+ expected_url = "/-/snippets/#{note.noteable.id}/notes/#{note.id}/toggle_award_emoji"
expect(subject).to eq(expected_url)
end
@@ -38,7 +38,7 @@ RSpec.describe AwardEmojiHelper do
let(:awardable) { snippet }
it 'returns correct url' do
- expected_url = "/snippets/#{snippet.id}/toggle_award_emoji"
+ expected_url = "/-/snippets/#{snippet.id}/toggle_award_emoji"
expect(subject).to eq(expected_url)
end
diff --git a/spec/helpers/blame_helper_spec.rb b/spec/helpers/blame_helper_spec.rb
index 6371c2b63ce..d305c4c595e 100644
--- a/spec/helpers/blame_helper_spec.rb
+++ b/spec/helpers/blame_helper_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe BlameHelper do
Time.zone.local(2013, 2, 24, 0, 0, 0),
Time.zone.local(2010, 9, 22, 0, 0, 0)]
end
+
let(:blame_groups) do
[
{ commit: double(committed_date: dates[0]) },
@@ -57,6 +58,7 @@ RSpec.describe BlameHelper do
project = double(created_at: now)
helper.age_map_duration(today_blame_groups, project)
end
+
let(:today_blame_groups) { [{ commit: double(committed_date: now) }] }
let(:now) { Time.zone.now }
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index fe975aa7723..3ba9f39d21a 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -30,6 +30,8 @@ RSpec.describe BlobHelper do
let(:namespace) { create(:namespace, name: 'gitlab') }
let(:project) { create(:project, :repository, namespace: namespace) }
+ subject(:link) { helper.edit_blob_button(project, 'master', 'README.md') }
+
before do
allow(helper).to receive(:current_user).and_return(nil)
allow(helper).to receive(:can?).and_return(true)
@@ -53,15 +55,49 @@ RSpec.describe BlobHelper do
end
it 'returns a link with the proper route' do
- link = helper.edit_blob_button(project, 'master', 'README.md')
-
expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/-/edit/master/README.md")
end
it 'returns a link with the passed link_opts on the expected route' do
- link = helper.edit_blob_button(project, 'master', 'README.md', link_opts: { mr_id: 10 })
+ link_with_mr = helper.edit_blob_button(project, 'master', 'README.md', link_opts: { mr_id: 10 })
- expect(Capybara.string(link).find_link('Edit')[:href]).to eq("/#{project.full_path}/-/edit/master/README.md?mr_id=10")
+ expect(Capybara.string(link_with_mr).find_link('Edit')[:href]).to eq("/#{project.full_path}/-/edit/master/README.md?mr_id=10")
+ end
+
+ context 'when edit is the primary button' do
+ before do
+ stub_feature_flags(web_ide_primary_edit: false)
+ end
+
+ it 'is rendered as primary' do
+ expect(link).not_to match(/btn-inverted/)
+ end
+
+ it 'passes on primary tracking attributes' do
+ parsed_link = Capybara.string(link).find_link('Edit')
+
+ expect(parsed_link[:'data-track-event']).to eq("click_edit")
+ expect(parsed_link[:'data-track-label']).to eq("Edit")
+ expect(parsed_link[:'data-track-property']).to eq(nil)
+ end
+ end
+
+ context 'when Web IDE is the primary button' do
+ before do
+ stub_feature_flags(web_ide_primary_edit: true)
+ end
+
+ it 'is rendered as inverted' do
+ expect(link).to match(/btn-inverted/)
+ end
+
+ it 'passes on secondary tracking attributes' do
+ parsed_link = Capybara.string(link).find_link('Edit')
+
+ expect(parsed_link[:'data-track-event']).to eq("click_edit")
+ expect(parsed_link[:'data-track-label']).to eq("Edit")
+ expect(parsed_link[:'data-track-property']).to eq("secondary")
+ end
end
end
@@ -246,6 +282,16 @@ RSpec.describe BlobHelper do
expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
end
end
+
+ context 'when the blob does not have an auxiliary viewer' do
+ before do
+ allow(blob).to receive(:auxiliary_viewer).and_return(nil)
+ end
+
+ it 'is false' do
+ expect(helper.show_suggest_pipeline_creation_celebration?).to be_falsey
+ end
+ end
end
context 'experiment disabled' do
@@ -285,6 +331,62 @@ RSpec.describe BlobHelper do
end
end
+ describe '#ide_edit_button' do
+ let_it_be(:namespace) { create(:namespace, name: 'gitlab') }
+ let_it_be(:project) { create(:project, :repository, namespace: namespace) }
+ let_it_be(:current_user) { create(:user) }
+ let(:can_push_code) { true }
+ let(:blob) { project.repository.blob_at('refs/heads/master', 'README.md') }
+
+ subject(:link) { helper.ide_edit_button(project, 'master', 'README.md', blob: blob) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:can?).with(current_user, :push_code, project).and_return(can_push_code)
+ allow(helper).to receive(:can_collaborate_with_project?).and_return(true)
+ end
+
+ it 'returns a link with a Web IDE route' do
+ expect(Capybara.string(link).find_link('Web IDE')[:href]).to eq("/-/ide/project/#{project.full_path}/edit/master/-/README.md")
+ end
+
+ context 'when edit is the primary button' do
+ before do
+ stub_feature_flags(web_ide_primary_edit: false)
+ end
+
+ it 'is rendered as inverted' do
+ expect(link).to match(/btn-inverted/)
+ end
+
+ it 'passes on secondary tracking attributes' do
+ parsed_link = Capybara.string(link).find_link('Web IDE')
+
+ expect(parsed_link[:'data-track-event']).to eq("click_edit_ide")
+ expect(parsed_link[:'data-track-label']).to eq("Web IDE")
+ expect(parsed_link[:'data-track-property']).to eq("secondary")
+ end
+ end
+
+ context 'when Web IDE is the primary button' do
+ before do
+ stub_feature_flags(web_ide_primary_edit: true)
+ end
+
+ it 'is rendered as primary' do
+ expect(link).not_to match(/btn-inverted/)
+ end
+
+ it 'passes on primary tracking attributes' do
+ parsed_link = Capybara.string(link).find_link('Web IDE')
+
+ expect(parsed_link[:'data-track-event']).to eq("click_edit_ide")
+ expect(parsed_link[:'data-track-label']).to eq("Web IDE")
+ expect(parsed_link[:'data-track-property']).to eq(nil)
+ end
+ end
+ end
+
describe '#ide_edit_path' do
let(:project) { create(:project) }
let(:current_user) { create(:user) }
diff --git a/spec/helpers/branches_helper_spec.rb b/spec/helpers/branches_helper_spec.rb
new file mode 100644
index 00000000000..1f7bf25afcd
--- /dev/null
+++ b/spec/helpers/branches_helper_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BranchesHelper do
+ describe '#access_levels_data' do
+ subject { helper.access_levels_data(access_levels) }
+
+ context 'when access_levels is nil' do
+ let(:access_levels) { nil }
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'when access levels are provided' do
+ let(:protected_branch) { create(:protected_branch, :developers_can_merge, :maintainers_can_push) }
+
+ let(:merge_level) { protected_branch.merge_access_levels.first }
+ let(:push_level) { protected_branch.push_access_levels.first }
+ let(:access_levels) { [merge_level, push_level] }
+
+ it 'returns the correct array' do
+ expected_array = [
+ { id: merge_level.id, type: :role, access_level: Gitlab::Access::DEVELOPER },
+ { id: push_level.id, type: :role, access_level: Gitlab::Access::MAINTAINER }
+ ]
+
+ expect(subject).to eq(expected_array)
+ end
+ end
+ end
+end
diff --git a/spec/helpers/ci/pipelines_helper_spec.rb b/spec/helpers/ci/pipelines_helper_spec.rb
new file mode 100644
index 00000000000..89b9907d0c2
--- /dev/null
+++ b/spec/helpers/ci/pipelines_helper_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelinesHelper do
+ include Devise::Test::ControllerHelpers
+
+ describe 'pipeline_warnings' do
+ let(:pipeline) { double(:pipeline, warning_messages: warning_messages) }
+
+ subject { helper.pipeline_warnings(pipeline) }
+
+ context 'when pipeline has no warnings' do
+ let(:warning_messages) { [] }
+
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'when pipeline has warnings' do
+ let(:warning_messages) { [double(content: 'Warning 1'), double(content: 'Warning 2')] }
+
+ it 'returns a warning callout box' do
+ expect(subject).to have_css 'div.alert-warning'
+ expect(subject).to include 'Warning:'
+ end
+
+ it 'lists the warnings' do
+ expect(subject).to include 'Warning 1'
+ expect(subject).to include 'Warning 2'
+ end
+ end
+ end
+end
diff --git a/spec/helpers/ci/status_helper_spec.rb b/spec/helpers/ci/status_helper_spec.rb
index 12a6acb1ecc..0af396149ef 100644
--- a/spec/helpers/ci/status_helper_spec.rb
+++ b/spec/helpers/ci/status_helper_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Ci::StatusHelper do
subject { helper.render_status_with_link("success", icon_size: 24) }
it "has the svg class to change size" do
- is_expected.to include("<svg class=\"s24\">")
+ is_expected.to include("<svg class=\"s24\"")
end
end
end
diff --git a/spec/helpers/environments_helper_spec.rb b/spec/helpers/environments_helper_spec.rb
index 90d6096654e..cb7d12b331a 100644
--- a/spec/helpers/environments_helper_spec.rb
+++ b/spec/helpers/environments_helper_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe EnvironmentsHelper do
'metrics-dashboard-base-path' => environment_metrics_path(environment),
'current-environment-name' => environment.name,
'documentation-path' => help_page_path('administration/monitoring/prometheus/index.md'),
- 'add-dashboard-documentation-path' => help_page_path('user/project/integrations/prometheus.md', anchor: 'adding-a-new-dashboard-to-your-project'),
+ 'add-dashboard-documentation-path' => help_page_path('operations/metrics/dashboards/index.md', anchor: 'add-a-new-dashboard-to-your-project'),
'empty-getting-started-svg-path' => match_asset_path('/assets/illustrations/monitoring/getting_started.svg'),
'empty-loading-svg-path' => match_asset_path('/assets/illustrations/monitoring/loading.svg'),
'empty-no-data-svg-path' => match_asset_path('/assets/illustrations/monitoring/no_data.svg'),
@@ -42,9 +42,10 @@ RSpec.describe EnvironmentsHelper do
'custom-metrics-available' => 'true',
'alerts-endpoint' => project_prometheus_alerts_path(project, environment_id: environment.id, format: :json),
'prometheus-alerts-available' => 'true',
- 'custom-dashboard-base-path' => Metrics::Dashboard::CustomDashboardService::DASHBOARD_ROOT,
+ 'custom-dashboard-base-path' => Gitlab::Metrics::Dashboard::RepoDashboardFinder::DASHBOARD_ROOT,
'operations-settings-path' => project_settings_operations_path(project),
- 'can-access-operations-settings' => 'true'
+ 'can-access-operations-settings' => 'true',
+ 'panel-preview-endpoint' => project_metrics_dashboards_builder_path(project, format: :json)
)
end
diff --git a/spec/helpers/events_helper_spec.rb b/spec/helpers/events_helper_spec.rb
index 4ca31405c1e..c629643e248 100644
--- a/spec/helpers/events_helper_spec.rb
+++ b/spec/helpers/events_helper_spec.rb
@@ -200,7 +200,7 @@ RSpec.describe EventsHelper do
it 'returns a project snippet note url' do
event.target = create(:note_on_project_snippet, note: 'keep going')
- expect(subject).to eq("#{project_base_url}/snippets/#{event.note_target.id}#note_#{event.target.id}")
+ expect(subject).to eq("#{project_base_url}/-/snippets/#{event.note_target.id}#note_#{event.target.id}")
end
it 'returns a project issue url' do
diff --git a/spec/helpers/gitlab_routing_helper_spec.rb b/spec/helpers/gitlab_routing_helper_spec.rb
index bd48fc7cee2..1ad7c7bb9ff 100644
--- a/spec/helpers/gitlab_routing_helper_spec.rb
+++ b/spec/helpers/gitlab_routing_helper_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe GitlabRoutingHelper do
it 'returns snippet preview markdown path for a personal snippet' do
@snippet = create(:personal_snippet)
- expect(preview_markdown_path(nil)).to eq("/snippets/preview_markdown")
+ expect(preview_markdown_path(nil)).to eq("/-/snippets/preview_markdown")
end
it 'returns project preview markdown path for a project snippet' do
@@ -153,132 +153,152 @@ RSpec.describe GitlabRoutingHelper do
describe '#gitlab_snippet_path' do
it 'returns the personal snippet path' do
- expect(gitlab_snippet_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}")
+ expect(gitlab_snippet_path(personal_snippet)).to eq("/-/snippets/#{personal_snippet.id}")
end
it 'returns the project snippet path' do
- expect(gitlab_snippet_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}")
+ expect(gitlab_snippet_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}")
end
end
describe '#gitlab_snippet_url' do
it 'returns the personal snippet url' do
- expect(gitlab_snippet_url(personal_snippet)).to eq("http://test.host/snippets/#{personal_snippet.id}")
+ expect(gitlab_snippet_url(personal_snippet)).to eq("http://test.host/-/snippets/#{personal_snippet.id}")
end
it 'returns the project snippet url' do
- expect(gitlab_snippet_url(project_snippet)).to eq("http://test.host/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}")
+ expect(gitlab_snippet_url(project_snippet)).to eq("http://test.host/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}")
end
end
describe '#gitlab_raw_snippet_path' do
it 'returns the raw personal snippet path' do
- expect(gitlab_raw_snippet_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/raw")
+ expect(gitlab_raw_snippet_path(personal_snippet)).to eq("/-/snippets/#{personal_snippet.id}/raw")
end
it 'returns the raw project snippet path' do
- expect(gitlab_raw_snippet_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}/raw")
+ expect(gitlab_raw_snippet_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}/raw")
end
end
describe '#gitlab_raw_snippet_blob_path' do
+ let(:snippet) { personal_snippet }
+ let(:blob) { snippet.blobs.first }
let(:ref) { 'test-ref' }
+ let(:args) { {} }
+
+ subject { gitlab_raw_snippet_blob_path(snippet, blob.path, ref, args) }
+
+ it_behaves_like 'snippet blob raw path'
+
+ context 'when an argument is set' do
+ let(:args) { { inline: true } }
- it_behaves_like 'snippet blob raw path' do
- subject { gitlab_raw_snippet_blob_path(blob, ref) }
+ it { expect(subject).to eq("/-/snippets/#{personal_snippet.id}/raw/#{ref}/#{blob.path}?inline=true") }
end
context 'without a ref' do
- let(:blob) { personal_snippet.blobs.first }
- let(:ref) { blob.repository.root_ref }
+ let(:ref) { nil }
+ let(:expected_ref) { blob.repository.root_ref }
it 'uses the root ref' do
- expect(gitlab_raw_snippet_blob_path(blob)).to eq("/-/snippets/#{personal_snippet.id}/raw/#{ref}/#{blob.path}")
+ expect(subject).to eq("/-/snippets/#{personal_snippet.id}/raw/#{expected_ref}/#{blob.path}")
end
end
end
describe '#gitlab_raw_snippet_url' do
it 'returns the raw personal snippet url' do
- expect(gitlab_raw_snippet_url(personal_snippet)).to eq("http://test.host/snippets/#{personal_snippet.id}/raw")
+ expect(gitlab_raw_snippet_url(personal_snippet)).to eq("http://test.host/-/snippets/#{personal_snippet.id}/raw")
end
it 'returns the raw project snippet url' do
- expect(gitlab_raw_snippet_url(project_snippet)).to eq("http://test.host/#{project_snippet.project.full_path}/snippets/#{project_snippet.id}/raw")
+ expect(gitlab_raw_snippet_url(project_snippet)).to eq("http://test.host/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}/raw")
end
end
describe '#gitlab_raw_snippet_blob_url' do
let(:blob) { snippet.blobs.first }
let(:ref) { 'snippet-test-ref' }
+ let(:args) { {} }
- context 'for a PersonalSnippet' do
- let(:snippet) { personal_snippet }
+ subject { gitlab_raw_snippet_blob_url(snippet, blob.path, ref, args) }
- it { expect(gitlab_raw_snippet_blob_url(snippet, blob.path, ref)).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}") }
- end
+ it_behaves_like 'snippet blob raw url'
- context 'for a ProjectSnippet' do
- let(:snippet) { project_snippet }
+ context 'when an argument is set' do
+ let(:args) { { inline: true } }
+ let(:snippet) { personal_snippet }
- it { expect(gitlab_raw_snippet_blob_url(snippet, blob.path, ref)).to eq("http://test.host/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}") }
+ it { expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}?inline=true") }
end
context 'without a ref' do
let(:snippet) { personal_snippet }
- let(:ref) { snippet.repository.root_ref }
+ let(:ref) { nil }
+ let(:expected_ref) { snippet.repository.root_ref }
it 'uses the root ref' do
- expect(gitlab_raw_snippet_blob_url(snippet, blob.path)).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}")
+ expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{expected_ref}/#{blob.path}")
end
end
end
+ describe '#gitlab_raw_snippet_url' do
+ it 'returns the raw personal snippet url' do
+ expect(gitlab_raw_snippet_url(personal_snippet)).to eq("http://test.host/-/snippets/#{personal_snippet.id}/raw")
+ end
+
+ it 'returns the raw project snippet url' do
+ expect(gitlab_raw_snippet_url(project_snippet)).to eq("http://test.host/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}/raw")
+ end
+ end
+
describe '#gitlab_snippet_notes_path' do
it 'returns the notes path for the personal snippet' do
- expect(gitlab_snippet_notes_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/notes")
+ expect(gitlab_snippet_notes_path(personal_snippet)).to eq("/-/snippets/#{personal_snippet.id}/notes")
end
end
describe '#gitlab_snippet_notes_url' do
it 'returns the notes url for the personal snippet' do
- expect(gitlab_snippet_notes_url(personal_snippet)).to eq("http://test.host/snippets/#{personal_snippet.id}/notes")
+ expect(gitlab_snippet_notes_url(personal_snippet)).to eq("http://test.host/-/snippets/#{personal_snippet.id}/notes")
end
end
describe '#gitlab_snippet_note_path' do
it 'returns the note path for the personal snippet' do
- expect(gitlab_snippet_note_path(personal_snippet, note)).to eq("/snippets/#{personal_snippet.id}/notes/#{note.id}")
+ expect(gitlab_snippet_note_path(personal_snippet, note)).to eq("/-/snippets/#{personal_snippet.id}/notes/#{note.id}")
end
end
describe '#gitlab_snippet_note_url' do
it 'returns the note url for the personal snippet' do
- expect(gitlab_snippet_note_url(personal_snippet, note)).to eq("http://test.host/snippets/#{personal_snippet.id}/notes/#{note.id}")
+ expect(gitlab_snippet_note_url(personal_snippet, note)).to eq("http://test.host/-/snippets/#{personal_snippet.id}/notes/#{note.id}")
end
end
describe '#gitlab_toggle_award_emoji_snippet_note_path' do
it 'returns the note award emoji path for the personal snippet' do
- expect(gitlab_toggle_award_emoji_snippet_note_path(personal_snippet, note)).to eq("/snippets/#{personal_snippet.id}/notes/#{note.id}/toggle_award_emoji")
+ expect(gitlab_toggle_award_emoji_snippet_note_path(personal_snippet, note)).to eq("/-/snippets/#{personal_snippet.id}/notes/#{note.id}/toggle_award_emoji")
end
end
describe '#gitlab_toggle_award_emoji_snippet_note_url' do
it 'returns the note award emoji url for the personal snippet' do
- expect(gitlab_toggle_award_emoji_snippet_note_url(personal_snippet, note)).to eq("http://test.host/snippets/#{personal_snippet.id}/notes/#{note.id}/toggle_award_emoji")
+ expect(gitlab_toggle_award_emoji_snippet_note_url(personal_snippet, note)).to eq("http://test.host/-/snippets/#{personal_snippet.id}/notes/#{note.id}/toggle_award_emoji")
end
end
describe '#gitlab_toggle_award_emoji_snippet_path' do
it 'returns the award emoji path for the personal snippet' do
- expect(gitlab_toggle_award_emoji_snippet_path(personal_snippet)).to eq("/snippets/#{personal_snippet.id}/toggle_award_emoji")
+ expect(gitlab_toggle_award_emoji_snippet_path(personal_snippet)).to eq("/-/snippets/#{personal_snippet.id}/toggle_award_emoji")
end
end
describe '#gitlab_toggle_award_emoji_snippet_url' do
it 'returns the award url for the personal snippet' do
- expect(gitlab_toggle_award_emoji_snippet_url(personal_snippet)).to eq("http://test.host/snippets/#{personal_snippet.id}/toggle_award_emoji")
+ expect(gitlab_toggle_award_emoji_snippet_url(personal_snippet)).to eq("http://test.host/-/snippets/#{personal_snippet.id}/toggle_award_emoji")
end
end
@@ -288,7 +308,7 @@ RSpec.describe GitlabRoutingHelper do
end
it 'returns the project snippets dashboard path' do
- expect(gitlab_dashboard_snippets_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/snippets")
+ expect(gitlab_dashboard_snippets_path(project_snippet)).to eq("/#{project_snippet.project.full_path}/-/snippets")
end
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index a739c16f3b1..0790dc1b674 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -114,12 +114,14 @@ RSpec.describe GroupsHelper do
ancestor_locked_and_has_been_overridden: /This setting is applied on .+ and has been overridden on this subgroup/
}
end
+
let(:possible_linked_ancestors) do
{
root_group: root_group,
subgroup: subgroup
}
end
+
let(:users) do
{
root_owner: root_owner,
diff --git a/spec/helpers/icons_helper_spec.rb b/spec/helpers/icons_helper_spec.rb
index c47bba42ae2..872aa821560 100644
--- a/spec/helpers/icons_helper_spec.rb
+++ b/spec/helpers/icons_helper_spec.rb
@@ -48,19 +48,24 @@ RSpec.describe IconsHelper do
describe 'sprite_icon' do
icon_name = 'clock'
- it 'returns svg icon html' do
+ it 'returns svg icon html with DEFAULT_ICON_SIZE' do
expect(sprite_icon(icon_name).to_s)
- .to eq "<svg><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
+ .to eq "<svg class=\"s#{IconsHelper::DEFAULT_ICON_SIZE}\" data-testid=\"#{icon_name}-icon\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
+ end
+
+ it 'returns svg icon html without size class' do
+ expect(sprite_icon(icon_name, size: nil).to_s)
+ .to eq "<svg data-testid=\"#{icon_name}-icon\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
it 'returns svg icon html + size classes' do
expect(sprite_icon(icon_name, size: 72).to_s)
- .to eq "<svg class=\"s72\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
+ .to eq "<svg class=\"s72\" data-testid=\"#{icon_name}-icon\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
it 'returns svg icon html + size classes + additional class' do
expect(sprite_icon(icon_name, size: 72, css_class: 'icon-danger').to_s)
- .to eq "<svg class=\"s72 icon-danger\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
+ .to eq "<svg class=\"s72 icon-danger\" data-testid=\"#{icon_name}-icon\"><use xlink:href=\"#{icons_path}##{icon_name}\"></use></svg>"
end
describe 'non existing icon' do
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index a3945b5bd8a..9b32758c053 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe IssuablesHelper do
end
before do
- user.destroy
+ user.destroy!
end
it 'returns "Ghost user" as edited_by' do
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index f2757f0e3ed..3f84eeb12c2 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -162,7 +162,7 @@ RSpec.describe IssuesHelper do
context 'with linked issue' do
context 'with moved issue' do
before do
- issue.update(moved_to: new_issue)
+ issue.update!(moved_to: new_issue)
end
context 'when user has permission to see new issue' do
@@ -181,7 +181,7 @@ RSpec.describe IssuesHelper do
context 'with duplicated issue' do
before do
- issue.update(duplicated_to: new_issue)
+ issue.update!(duplicated_to: new_issue)
end
context 'when user has permission to see new issue' do
@@ -203,7 +203,7 @@ RSpec.describe IssuesHelper do
let(:user) { project.owner }
before do
- issue.update(moved_to: nil, duplicated_to: nil)
+ issue.update!(moved_to: nil, duplicated_to: nil)
end
it_behaves_like 'does not display link'
@@ -220,7 +220,7 @@ RSpec.describe IssuesHelper do
allow(Gitlab::IncomingEmail).to receive(:enabled?) { true }
allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?) { true }
- old_issue.update(moved_to: new_issue)
+ old_issue.update!(moved_to: new_issue)
end
it 'is true when moved issue project has service desk disabled' do
diff --git a/spec/helpers/members_helper_spec.rb b/spec/helpers/members_helper_spec.rb
index 99e8696e960..84b3f99b89a 100644
--- a/spec/helpers/members_helper_spec.rb
+++ b/spec/helpers/members_helper_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe MembersHelper do
context 'an accepted user invitation with no user associated' do
before do
- group_member_invite.update(invite_email: "#{SecureRandom.hex}@example.com", invite_token: nil, user_id: nil)
+ group_member_invite.update_columns(invite_email: "#{SecureRandom.hex}@example.com", invite_token: nil, user_id: nil)
end
it 'logs an exception and shows orphaned status' do
diff --git a/spec/helpers/namespace_storage_limit_alert_helper_spec.rb b/spec/helpers/namespace_storage_limit_alert_helper_spec.rb
new file mode 100644
index 00000000000..ab3cf96edef
--- /dev/null
+++ b/spec/helpers/namespace_storage_limit_alert_helper_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespaceStorageLimitAlertHelper do
+ describe '#display_namespace_storage_limit_alert!' do
+ it 'is defined in CE' do
+ expect { helper.display_namespace_storage_limit_alert! }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index 1313a5c9352..1636ba6ef42 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe NamespacesHelper do
:private,
project_creation_level: admin_project_creation_level)
end
+
let!(:user) { create(:user) }
let!(:user_project_creation_level) { nil }
let!(:user_group) do
@@ -17,18 +18,21 @@ RSpec.describe NamespacesHelper do
:private,
project_creation_level: user_project_creation_level)
end
+
let!(:subgroup1) do
create(:group,
:private,
parent: admin_group,
project_creation_level: nil)
end
+
let!(:subgroup2) do
create(:group,
:private,
parent: admin_group,
project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
end
+
let!(:subgroup3) do
create(:group,
:private,
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index f29f947ba46..41511b65cc4 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -77,9 +77,9 @@ RSpec.describe NotesHelper do
context 'for a merge request discusion' do
let(:merge_request) { create(:merge_request, source_project: project, target_project: project, importing: true) }
- let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
- let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) }
- let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
+ let!(:merge_request_diff1) { merge_request.merge_request_diffs.create!(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
+ let!(:merge_request_diff2) { merge_request.merge_request_diffs.create!(head_commit_sha: nil) }
+ let!(:merge_request_diff3) { merge_request.merge_request_diffs.create!(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
context 'for a diff discussion' do
context 'when the discussion is active' do
@@ -225,7 +225,7 @@ RSpec.describe NotesHelper do
it 'return snippet notes path for personal snippet' do
@snippet = create(:personal_snippet)
- expect(helper.notes_url).to eq("/snippets/#{@snippet.id}/notes")
+ expect(helper.notes_url).to eq("/-/snippets/#{@snippet.id}/notes")
end
it 'return project notes path for project snippet' do
@@ -250,7 +250,7 @@ RSpec.describe NotesHelper do
it 'return snippet notes path for personal snippet' do
note = create(:note_on_personal_snippet)
- expect(helper.note_url(note)).to eq("/snippets/#{note.noteable.id}/notes/#{note.id}")
+ expect(helper.note_url(note)).to eq("/-/snippets/#{note.noteable.id}/notes/#{note.id}")
end
it 'return project notes path for project snippet' do
@@ -284,7 +284,7 @@ RSpec.describe NotesHelper do
@snippet = create(:project_snippet, project: @project)
@note = create(:note_on_personal_snippet)
- expect(helper.form_resources).to eq([@project.namespace, @project, @note])
+ expect(helper.form_resources).to eq([@project, @note])
end
it 'returns namespace, project and note path for other noteables' do
@@ -292,7 +292,7 @@ RSpec.describe NotesHelper do
@project = create(:project, path: 'test', namespace: namespace)
@note = create(:note_on_issue, project: @project)
- expect(helper.form_resources).to eq([@project.namespace, @project, @note])
+ expect(helper.form_resources).to eq([@project, @note])
end
end
diff --git a/spec/helpers/notifications_helper_spec.rb b/spec/helpers/notifications_helper_spec.rb
index 319c85c19f9..8d2806cbef6 100644
--- a/spec/helpers/notifications_helper_spec.rb
+++ b/spec/helpers/notifications_helper_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe NotificationsHelper do
it { expect(notification_event_name(:success_pipeline)).to match('Successful pipeline') }
it { expect(notification_event_name(:failed_pipeline)).to match('Failed pipeline') }
it { expect(notification_event_name(:fixed_pipeline)).to match('Fixed pipeline') }
+ it { expect(notification_event_name(:moved_project)).to match('Moved project') }
end
describe '#notification_icon_level' do
diff --git a/spec/helpers/operations_helper_spec.rb b/spec/helpers/operations_helper_spec.rb
index 73deb2249bc..8e3b1db5272 100644
--- a/spec/helpers/operations_helper_spec.rb
+++ b/spec/helpers/operations_helper_spec.rb
@@ -152,7 +152,7 @@ RSpec.describe OperationsHelper do
send_email: 'false',
pagerduty_active: 'true',
pagerduty_token: operations_settings.pagerduty_token,
- pagerduty_webhook_url: project_incidents_pagerduty_url(project, token: operations_settings.pagerduty_token),
+ pagerduty_webhook_url: project_incidents_integrations_pagerduty_url(project, token: operations_settings.pagerduty_token),
pagerduty_reset_key_path: reset_pagerduty_token_project_settings_operations_path(project)
)
end
diff --git a/spec/helpers/packages_helper_spec.rb b/spec/helpers/packages_helper_spec.rb
new file mode 100644
index 00000000000..1917c851547
--- /dev/null
+++ b/spec/helpers/packages_helper_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PackagesHelper do
+ let_it_be(:base_url) { "#{Gitlab.config.gitlab.url}/api/v4/" }
+ let_it_be(:project) { create(:project) }
+
+ describe 'package_registry_instance_url' do
+ it 'returns conan instance url when registry_type is conan' do
+ url = helper.package_registry_instance_url(:conan)
+
+ expect(url).to eq("#{base_url}packages/conan")
+ end
+
+ it 'returns npm instance url when registry_type is npm' do
+ url = helper.package_registry_instance_url(:npm)
+
+ expect(url).to eq("#{base_url}packages/npm")
+ end
+ end
+
+ describe 'package_registry_project_url' do
+ it 'returns maven registry url when registry_type is not provided' do
+ url = helper.package_registry_project_url(1)
+
+ expect(url).to eq("#{base_url}projects/1/packages/maven")
+ end
+
+ it 'returns specified registry url when registry_type is provided' do
+ url = helper.package_registry_project_url(1, :npm)
+
+ expect(url).to eq("#{base_url}projects/1/packages/npm")
+ end
+ end
+
+ describe 'pypi_registry_url' do
+ let_it_be(:base_url_with_token) { base_url.sub('://', '://__token__:<your_personal_token>@') }
+
+ it 'returns the pypi registry url' do
+ url = helper.pypi_registry_url(1)
+
+ expect(url).to eq("#{base_url_with_token}projects/1/packages/pypi/simple")
+ end
+ end
+
+ describe 'composer_registry_url' do
+ it 'returns the composer registry url' do
+ url = helper.composer_registry_url(1)
+
+ expect(url).to eq("#{base_url}group/1/-/packages/composer/packages.json")
+ end
+ end
+
+ describe 'packages_coming_soon_enabled?' do
+ it 'returns false when the feature flag is disabled' do
+ stub_feature_flags(packages_coming_soon: false)
+
+ expect(helper.packages_coming_soon_enabled?(project)).to eq(false)
+ end
+
+ it 'returns false when not on dev or gitlab.com' do
+ expect(helper.packages_coming_soon_enabled?(project)).to eq(false)
+ end
+ end
+
+ describe 'packages_coming_soon_data' do
+ let_it_be(:group) { create(:group) }
+
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?) { true }
+ end
+
+ it 'returns the gitlab project on gitlab.com' do
+ allow(Gitlab).to receive(:com?) { true }
+
+ expect(helper.packages_coming_soon_data(project)).to include({ project_path: 'gitlab-org/gitlab' })
+ end
+
+ it 'returns the test project when not on gitlab.com' do
+ expect(helper.packages_coming_soon_data(project)).to include({ project_path: 'gitlab-org/gitlab-test' })
+ end
+
+ it 'works correctly with a group' do
+ expect(helper.packages_coming_soon_data(group)).to include({ project_path: 'gitlab-org/gitlab-test' })
+ end
+ end
+end
diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb
index 4a8ba2b7113..61b7ff94edb 100644
--- a/spec/helpers/profiles_helper_spec.rb
+++ b/spec/helpers/profiles_helper_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe ProfilesHelper do
end
it 'returns DB stored commit_email' do
- user.update(commit_email: Gitlab::PrivateCommitEmail::TOKEN)
+ user.update!(commit_email: Gitlab::PrivateCommitEmail::TOKEN)
expect(helper.selected_commit_email(user)).to eq(Gitlab::PrivateCommitEmail::TOKEN)
end
diff --git a/spec/helpers/projects/alert_management_helper_spec.rb b/spec/helpers/projects/alert_management_helper_spec.rb
index 859c08b194a..183f0438c35 100644
--- a/spec/helpers/projects/alert_management_helper_spec.rb
+++ b/spec/helpers/projects/alert_management_helper_spec.rb
@@ -28,7 +28,8 @@ RSpec.describe Projects::AlertManagementHelper do
expect(helper.alert_management_data(current_user, project)).to match(
'project-path' => project_path,
'enable-alert-management-path' => setting_path,
- 'populating-alerts-help-url' => 'http://test.host/help/user/project/operations/alert_management.html#enable-alert-management',
+ 'alerts-help-url' => 'http://test.host/help/operations/incident_management/index.md',
+ 'populating-alerts-help-url' => 'http://test.host/help/operations/incident_management/index.md#enable-alert-management',
'empty-alert-svg-path' => match_asset_path('/assets/illustrations/alert-management-empty-state.svg'),
'user-can-enable-alert-management' => 'true',
'alert-management-enabled' => 'false'
@@ -49,7 +50,7 @@ RSpec.describe Projects::AlertManagementHelper do
context 'when alerts service is inactive' do
it 'disables alert management' do
- alerts_service.update(active: false)
+ alerts_service.update!(active: false)
expect(data).to include(
'alert-management-enabled' => 'false'
diff --git a/spec/helpers/projects/incidents_helper_spec.rb b/spec/helpers/projects/incidents_helper_spec.rb
new file mode 100644
index 00000000000..0affa67a902
--- /dev/null
+++ b/spec/helpers/projects/incidents_helper_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::IncidentsHelper do
+ include Gitlab::Routing.url_helpers
+
+ let(:project) { create(:project) }
+ let(:project_path) { project.full_path }
+ let(:new_issue_path) { new_project_issue_path(project) }
+ let(:issue_path) { project_issues_path(project) }
+
+ describe '#incidents_data' do
+ subject(:data) { helper.incidents_data(project) }
+
+ it 'returns frontend configuration' do
+ expect(data).to match(
+ 'project-path' => project_path,
+ 'new-issue-path' => new_issue_path,
+ 'incident-template-name' => 'incident',
+ 'incident-type' => 'incident',
+ 'issue-path' => issue_path,
+ 'empty-list-svg-path' => match_asset_path('/assets/illustrations/incident-empty-state.svg')
+ )
+ end
+ end
+end
diff --git a/spec/helpers/projects/issues/service_desk_helper_spec.rb b/spec/helpers/projects/issues/service_desk_helper_spec.rb
new file mode 100644
index 00000000000..3f488fe692d
--- /dev/null
+++ b/spec/helpers/projects/issues/service_desk_helper_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Issues::ServiceDeskHelper do
+ let_it_be(:project) { create(:project, :public, service_desk_enabled: true) }
+ let(:user) { build_stubbed(:user) }
+ let(:current_user) { user }
+
+ describe '#service_desk_meta' do
+ subject { helper.service_desk_meta(project) }
+
+ context "when service desk is supported and user can edit project settings" do
+ before do
+ allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true)
+ allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(true)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?).with(current_user, :admin_project, project).and_return(true)
+ end
+
+ it {
+ is_expected.to eq({
+ is_service_desk_supported: true,
+ is_service_desk_enabled: true,
+ can_edit_project_settings: true,
+ service_desk_address: project.service_desk_address,
+ service_desk_help_page: help_page_path('user/project/service_desk'),
+ edit_project_page: edit_project_path(project),
+ svg_path: ActionController::Base.helpers.image_path('illustrations/service_desk_empty.svg')
+ })
+ }
+ end
+
+ context "when service desk is not supported and user cannot edit project settings" do
+ before do
+ allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(false)
+ allow(Gitlab::IncomingEmail).to receive(:supports_wildcard?).and_return(false)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?).with(current_user, :admin_project, project).and_return(false)
+ end
+
+ it {
+ is_expected.to eq({
+ is_service_desk_supported: false,
+ is_service_desk_enabled: false,
+ can_edit_project_settings: false,
+ incoming_email_help_page: help_page_path('administration/incoming_email', anchor: 'set-it-up'),
+ svg_path: ActionController::Base.helpers.image_path('illustrations/service-desk-setup.svg')
+ })
+ }
+ end
+ end
+end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index a3d0673f1b3..2b345ff3ae6 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe ProjectsHelper do
it "returns false if there are permissions and origin project is PRIVATE" do
allow(helper).to receive(:can?) { true }
- project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
expect(helper.can_change_visibility_level?(forked_project, user)).to be_falsey
end
@@ -134,7 +134,7 @@ RSpec.describe ProjectsHelper do
it "returns true if there are permissions and origin project is INTERNAL" do
allow(helper).to receive(:can?) { true }
- project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
expect(helper.can_change_visibility_level?(forked_project, user)).to be_truthy
end
@@ -647,7 +647,7 @@ RSpec.describe ProjectsHelper do
context 'user has a configured commit email' do
before do
confirmed_email = create(:email, :confirmed, user: user)
- user.update(commit_email: confirmed_email)
+ user.update!(commit_email: confirmed_email)
end
it 'returns the commit email' do
@@ -866,7 +866,7 @@ RSpec.describe ProjectsHelper do
when :developer, :maintainer
project.add_user(user, access)
when :owner
- project.namespace.update(owner: user)
+ project.namespace.update!(owner: user)
end
end
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index 82fc799f9b0..6ae99648ff3 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -80,6 +80,7 @@ RSpec.describe ReleasesHelper do
describe '#data_for_new_release_page' do
it 'has the needed data to display the "new release" page' do
keys = %i(project_id
+ releases_page_path
markdown_preview_path
markdown_docs_path
update_release_api_docs_path
diff --git a/spec/helpers/services_helper_spec.rb b/spec/helpers/services_helper_spec.rb
index 10d6ec7b6a7..481bc41bcf3 100644
--- a/spec/helpers/services_helper_spec.rb
+++ b/spec/helpers/services_helper_spec.rb
@@ -3,11 +3,6 @@
require 'spec_helper'
RSpec.describe ServicesHelper do
- describe 'event_action_title' do
- it { expect(event_action_title('comment')).to eq 'Comment' }
- it { expect(event_action_title('something')).to eq 'Something' }
- end
-
describe '#integration_form_data' do
subject { helper.integration_form_data(integration) }
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index 8fc54f17e71..302122c3990 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -6,21 +6,29 @@ RSpec.describe SnippetsHelper do
include Gitlab::Routing
include IconsHelper
- let_it_be(:public_personal_snippet) { create(:personal_snippet, :public) }
- let_it_be(:public_project_snippet) { create(:project_snippet, :public) }
+ let_it_be(:public_personal_snippet) { create(:personal_snippet, :public, :repository) }
+ let_it_be(:public_project_snippet) { create(:project_snippet, :public, :repository) }
describe '#embedded_raw_snippet_button' do
- subject { embedded_raw_snippet_button.to_s }
+ let(:blob) { snippet.blobs.first }
+ let(:ref) { blob.repository.root_ref }
- it 'returns view raw button of embedded snippets for personal snippets' do
- @snippet = create(:personal_snippet, :public)
- expect(subject).to eq(download_link("http://test.host/snippets/#{@snippet.id}/raw"))
+ subject { embedded_raw_snippet_button(snippet, blob) }
+
+ context 'for Personal Snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ it 'returns view raw button of embedded snippets' do
+ expect(subject).to eq(download_link("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}"))
+ end
end
- it 'returns view raw button of embedded snippets for project snippets' do
- @snippet = create(:project_snippet, :public)
+ context 'for Project Snippets' do
+ let(:snippet) { public_project_snippet }
- expect(subject).to eq(download_link("http://test.host/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw"))
+ it 'returns view raw button of embedded snippets' do
+ expect(subject).to eq(download_link("http://test.host/#{snippet.project.path_with_namespace}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}"))
+ end
end
def download_link(url)
@@ -29,18 +37,25 @@ RSpec.describe SnippetsHelper do
end
describe '#embedded_snippet_download_button' do
- subject { embedded_snippet_download_button }
+ let(:blob) { snippet.blobs.first }
+ let(:ref) { blob.repository.root_ref }
- it 'returns download button of embedded snippets for personal snippets' do
- @snippet = create(:personal_snippet, :public)
+ subject { embedded_snippet_download_button(snippet, blob) }
- expect(subject).to eq(download_link("http://test.host/snippets/#{@snippet.id}/raw"))
+ context 'for Personal Snippets' do
+ let(:snippet) { public_personal_snippet }
+
+ it 'returns download button of embedded snippets' do
+ expect(subject).to eq(download_link("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}"))
+ end
end
- it 'returns download button of embedded snippets for project snippets' do
- @snippet = create(:project_snippet, :public)
+ context 'for Project Snippets' do
+ let(:snippet) { public_project_snippet }
- expect(subject).to eq(download_link("http://test.host/#{@snippet.project.path_with_namespace}/snippets/#{@snippet.id}/raw"))
+ it 'returns download button of embedded snippets' do
+ expect(subject).to eq(download_link("http://test.host/#{snippet.project.path_with_namespace}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}"))
+ end
end
def download_link(url)
@@ -56,7 +71,7 @@ RSpec.describe SnippetsHelper do
context 'public' do
it 'returns a script tag with the snippet full url' do
- expect(subject).to eq(script_embed("http://test.host/snippets/#{snippet.id}"))
+ expect(subject).to eq(script_embed("http://test.host/-/snippets/#{snippet.id}"))
end
end
end
@@ -65,7 +80,7 @@ RSpec.describe SnippetsHelper do
let(:snippet) { public_project_snippet }
it 'returns a script tag with the snippet full url' do
- expect(subject).to eq(script_embed("http://test.host/#{snippet.project.path_with_namespace}/snippets/#{snippet.id}"))
+ expect(subject).to eq(script_embed("http://test.host/#{snippet.project.path_with_namespace}/-/snippets/#{snippet.id}"))
end
end
@@ -81,7 +96,7 @@ RSpec.describe SnippetsHelper do
let(:snippet) { public_personal_snippet }
it 'returns the download button' do
- expect(subject).to eq(download_link("/snippets/#{snippet.id}/raw"))
+ expect(subject).to eq(download_link("/-/snippets/#{snippet.id}/raw"))
end
end
@@ -89,7 +104,7 @@ RSpec.describe SnippetsHelper do
let(:snippet) { public_project_snippet }
it 'returns the download button' do
- expect(subject).to eq(download_link("/#{snippet.project.path_with_namespace}/snippets/#{snippet.id}/raw"))
+ expect(subject).to eq(download_link("/#{snippet.project.path_with_namespace}/-/snippets/#{snippet.id}/raw"))
end
end
@@ -107,7 +122,7 @@ RSpec.describe SnippetsHelper do
let(:visibility) { :private }
it 'returns the snippet badge' do
- expect(subject).to eq "<span class=\"badge badge-gray\"><i class=\"fa fa-lock\"></i> private</span>"
+ expect(subject).to eq "<span class=\"badge badge-gray\">#{sprite_icon('lock', size: 14, css_class: 'gl-vertical-align-middle')} private</span>"
end
end
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index 60238053aa2..6f1f358af83 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -25,7 +25,21 @@ RSpec.describe UserCalloutsHelper do
allow(helper).to receive(:user_dismissed?).and_return(false)
end
- it { is_expected.to be true }
+ context 'when active_nav_link is in the operations section' do
+ before do
+ allow(helper).to receive(:active_nav_link?).and_return(true)
+ end
+
+ it { is_expected.to be true }
+ end
+
+ context 'when active_nav_link is not in the operations section' do
+ before do
+ allow(helper).to receive(:active_nav_link?).and_return(false)
+ end
+
+ it { is_expected.to be false }
+ end
end
context 'when user dismissed' do
@@ -67,6 +81,36 @@ RSpec.describe UserCalloutsHelper do
end
end
+ describe '.show_customize_homepage_banner?' do
+ let(:customize_homepage) { true }
+
+ subject { helper.show_customize_homepage_banner?(customize_homepage) }
+
+ context 'when user has not dismissed' do
+ before do
+ allow(helper).to receive(:user_dismissed?).with(described_class::CUSTOMIZE_HOMEPAGE) { false }
+ end
+
+ context 'when customize_homepage is set' do
+ it { is_expected.to be true }
+ end
+
+ context 'when customize_homepage is false' do
+ let(:customize_homepage) { false }
+
+ it { is_expected.to be false }
+ end
+ end
+
+ context 'when user dismissed' do
+ before do
+ allow(helper).to receive(:user_dismissed?).with(described_class::CUSTOMIZE_HOMEPAGE) { true }
+ end
+
+ it { is_expected.to be false }
+ end
+ end
+
describe '.render_flash_user_callout' do
it 'renders the flash_user_callout partial' do
expect(helper).to receive(:render)
diff --git a/spec/helpers/visibility_level_helper_spec.rb b/spec/helpers/visibility_level_helper_spec.rb
index 9cbace3cfd0..7ef911131ba 100644
--- a/spec/helpers/visibility_level_helper_spec.rb
+++ b/spec/helpers/visibility_level_helper_spec.rb
@@ -171,13 +171,14 @@ RSpec.describe VisibilityLevelHelper do
with_them do
it "provides correct visibility level for forked project" do
- project.update(visibility_level: max_allowed)
+ project.update!(visibility_level: max_allowed)
expect(selected_visibility_level(forked_project, requested_level)).to eq(expected)
end
- it "provides correct visibiility level for project in group" do
- project.group.update(visibility_level: max_allowed)
+ it "provides correct visibility level for project in group" do
+ project.update!(visibility_level: max_allowed)
+ project.group.update!(visibility_level: max_allowed)
expect(selected_visibility_level(project, requested_level)).to eq(expected)
end
diff --git a/spec/helpers/wiki_helper_spec.rb b/spec/helpers/wiki_helper_spec.rb
index 040368b5ebd..6c7172e6232 100644
--- a/spec/helpers/wiki_helper_spec.rb
+++ b/spec/helpers/wiki_helper_spec.rb
@@ -62,7 +62,7 @@ RSpec.describe WikiHelper do
path = "/#{wiki.project.full_path}/-/wikis/pages?direction=#{direction}&sort=#{sort}"
helper.link_to(path, type: 'button', class: classes, title: 'Sort direction') do
- helper.sprite_icon("sort-#{icon_class}", size: 16)
+ helper.sprite_icon("sort-#{icon_class}")
end
end
diff --git a/spec/initializers/carrierwave_patch_spec.rb b/spec/initializers/carrierwave_patch_spec.rb
new file mode 100644
index 00000000000..d577eca2ac7
--- /dev/null
+++ b/spec/initializers/carrierwave_patch_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'CarrierWave::Storage::Fog::File' do
+ let(:uploader_class) { Class.new(CarrierWave::Uploader::Base) }
+ let(:uploader) { uploader_class.new }
+ let(:storage) { CarrierWave::Storage::Fog.new(uploader) }
+ let(:azure_options) do
+ {
+ azure_storage_account_name: 'AZURE_ACCOUNT_NAME',
+ azure_storage_access_key: 'AZURE_ACCESS_KEY',
+ provider: 'AzureRM'
+ }
+ end
+
+ subject { CarrierWave::Storage::Fog::File.new(uploader, storage, 'test') }
+
+ before do
+ require 'fog/azurerm'
+ allow(uploader).to receive(:fog_credentials).and_return(azure_options)
+ Fog.mock!
+ end
+
+ describe '#authenticated_url' do
+ context 'with Azure' do
+ it 'has an authenticated URL' do
+ expect(subject.authenticated_url).to eq("https://sa.blob.core.windows.net/test_container/test_blob?token")
+ end
+ end
+ end
+end
diff --git a/spec/initializers/database_config_spec.rb b/spec/initializers/database_config_spec.rb
index 29d499efcd3..ccd69de0b3a 100644
--- a/spec/initializers/database_config_spec.rb
+++ b/spec/initializers/database_config_spec.rb
@@ -9,65 +9,53 @@ RSpec.describe 'Database config initializer' do
before do
allow(ActiveRecord::Base).to receive(:establish_connection)
+ allow(Gitlab::Runtime).to receive(:max_threads).and_return(max_threads)
end
- context "when using multi-threaded runtime" do
- let(:max_threads) { 8 }
+ let(:max_threads) { 8 }
+ context "no existing pool size is set" do
before do
- allow(Gitlab::Runtime).to receive(:multi_threaded?).and_return(true)
- allow(Gitlab::Runtime).to receive(:max_threads).and_return(max_threads)
+ stub_database_config(pool_size: nil)
end
- context "and no existing pool size is set" do
- before do
- stub_database_config(pool_size: nil)
- end
-
- it "sets it to the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.from(nil).to(max_threads)
- end
+ it "sets it based on the max number of worker threads" do
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.from(nil).to(18)
end
+ end
- context "and the existing pool size is smaller than the max number of worker threads" do
- before do
- stub_database_config(pool_size: max_threads - 1)
- end
-
- it "sets it to the max number of worker threads" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }.by(1)
- end
+ context "the existing pool size is smaller than the max number of worker threads" do
+ before do
+ stub_database_config(pool_size: 1)
end
- context "and the existing pool size is larger than the max number of worker threads" do
- before do
- stub_database_config(pool_size: max_threads + 1)
- end
+ it "sets it based on the max number of worker threads" do
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.from(1).to(18)
+ end
+ end
- it "keeps the configured pool size" do
- expect { subject }.not_to change { Gitlab::Database.config['pool'] }
- end
+ context "and the existing pool size is larger than the max number of worker threads" do
+ before do
+ stub_database_config(pool_size: 100)
end
- context "when specifying headroom through an ENV variable" do
- let(:headroom) { 10 }
+ it "sets it based on the max number of worker threads" do
+ expect { subject }.to change { Gitlab::Database.config['pool'] }.from(100).to(18)
+ end
+ end
- before do
- stub_database_config(pool_size: 1)
- stub_env("DB_POOL_HEADROOM", headroom)
- end
+ context "when specifying headroom through an ENV variable" do
+ let(:headroom) { 15 }
- it "adds headroom on top of the calculated size" do
- expect { subject }.to change { Gitlab::Database.config['pool'] }
- .from(1)
- .to(max_threads + headroom)
- end
+ before do
+ stub_database_config(pool_size: 1)
+ stub_env("DB_POOL_HEADROOM", headroom)
end
- end
- context "when using single-threaded runtime" do
- it "does nothing" do
- expect { subject }.not_to change { Gitlab::Database.config['pool'] }
+ it "adds headroom on top of the calculated size" do
+ expect { subject }.to change { Gitlab::Database.config['pool'] }
+ .from(1)
+ .to(max_threads + headroom)
end
end
diff --git a/spec/initializers/direct_upload_support_spec.rb b/spec/initializers/direct_upload_support_spec.rb
index aa77c0905c9..670deecb4f1 100644
--- a/spec/initializers/direct_upload_support_spec.rb
+++ b/spec/initializers/direct_upload_support_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Direct upload support' do
end
where(:config_name) do
- %w(lfs artifacts uploads)
+ %w(artifacts lfs uploads)
end
with_them do
@@ -52,11 +52,19 @@ RSpec.describe 'Direct upload support' do
end
end
+ context 'when provider is AzureRM' do
+ let(:provider) { 'AzureRM' }
+
+ it 'succeeds' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
context 'when connection is empty' do
let(:connection) { nil }
it 'raises an error' do
- expect { subject }.to raise_error "No provider configured for '#{config_name}'. Only Google, AWS are supported."
+ expect { subject }.to raise_error "No provider configured for '#{config_name}'. Only Google, AWS, and AzureRM are supported."
end
end
diff --git a/spec/javascripts/test_bundle.js b/spec/javascripts/test_bundle.js
index b81804def57..b37a53515a6 100644
--- a/spec/javascripts/test_bundle.js
+++ b/spec/javascripts/test_bundle.js
@@ -1,5 +1,5 @@
/* eslint-disable
- jasmine/no-global-setup, jasmine/no-unsafe-spy, no-underscore-dangle, no-console
+ jasmine/no-global-setup, no-underscore-dangle, no-console
*/
import $ from 'jquery';
@@ -81,17 +81,6 @@ window.addEventListener('unhandledrejection', event => {
console.error(event.reason.stack || event.reason);
});
-// Add global function to spy on a module's dependencies via rewire
-window.spyOnDependency = (module, name) => {
- const dependency = module.__GetDependency__(name);
- const spy = jasmine.createSpy(name, dependency);
- module.__Rewire__(name, spy);
- return spy;
-};
-
-// Reset any rewired modules after each test (see babel-plugin-rewire)
-afterEach(__rewire_reset_all__); // eslint-disable-line
-
// HACK: Chrome 59 disconnects if there are too many synchronous tests in a row
// because it appears to lock up the thread that communicates to Karma's socket
// This async beforeEach gets called on every spec and releases the JS thread long
diff --git a/spec/lib/api/entities/nuget/dependency_group_spec.rb b/spec/lib/api/entities/nuget/dependency_group_spec.rb
index 5a649be846b..5e6de45adf2 100644
--- a/spec/lib/api/entities/nuget/dependency_group_spec.rb
+++ b/spec/lib/api/entities/nuget/dependency_group_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe API::Entities::Nuget::DependencyGroup do
]
}
end
+
let(:entity) { described_class.new(dependency_group) }
subject { entity.as_json }
diff --git a/spec/lib/api/entities/nuget/dependency_spec.rb b/spec/lib/api/entities/nuget/dependency_spec.rb
index 13897cc91f0..fb87b21bd1e 100644
--- a/spec/lib/api/entities/nuget/dependency_spec.rb
+++ b/spec/lib/api/entities/nuget/dependency_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe API::Entities::Nuget::Dependency do
'range': '2.0.0'
}
end
+
let(:entity) { described_class.new(dependency) }
subject { entity.as_json }
diff --git a/spec/lib/api/entities/nuget/metadatum_spec.rb b/spec/lib/api/entities/nuget/metadatum_spec.rb
index fe94ea3a69a..210ff0abdd3 100644
--- a/spec/lib/api/entities/nuget/metadatum_spec.rb
+++ b/spec/lib/api/entities/nuget/metadatum_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe API::Entities::Nuget::Metadatum do
icon_url: 'http://sandbox.com/icon'
}
end
+
let(:expected) do
{
'projectUrl': 'http://sandbox.com/project',
@@ -17,6 +18,7 @@ RSpec.describe API::Entities::Nuget::Metadatum do
'iconUrl': 'http://sandbox.com/icon'
}
end
+
let(:entity) { described_class.new(metadatum) }
subject { entity.as_json }
diff --git a/spec/lib/api/entities/nuget/search_result_spec.rb b/spec/lib/api/entities/nuget/search_result_spec.rb
index 2a760c70224..a24cd44be9e 100644
--- a/spec/lib/api/entities/nuget/search_result_spec.rb
+++ b/spec/lib/api/entities/nuget/search_result_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe API::Entities::Nuget::SearchResult do
}
}
end
+
let(:expected) do
{
'@type': 'Package',
diff --git a/spec/lib/api/entities/snippet_spec.rb b/spec/lib/api/entities/snippet_spec.rb
index bcb8c364392..068851f7f6c 100644
--- a/spec/lib/api/entities/snippet_spec.rb
+++ b/spec/lib/api/entities/snippet_spec.rb
@@ -123,11 +123,11 @@ RSpec.describe ::API::Entities::Snippet do
it_behaves_like 'common attributes'
it 'returns snippet web_url attribute' do
- expect(subject[:web_url]).to match("/snippets/#{snippet.id}")
+ expect(subject[:web_url]).to match("/-/snippets/#{snippet.id}")
end
it 'returns snippet raw_url attribute' do
- expect(subject[:raw_url]).to match("/snippets/#{snippet.id}/raw")
+ expect(subject[:raw_url]).to match("/-/snippets/#{snippet.id}/raw")
end
end
@@ -137,11 +137,11 @@ RSpec.describe ::API::Entities::Snippet do
it_behaves_like 'common attributes'
it 'returns snippet web_url attribute' do
- expect(subject[:web_url]).to match("#{snippet.project.full_path}/snippets/#{snippet.id}")
+ expect(subject[:web_url]).to match("#{snippet.project.full_path}/-/snippets/#{snippet.id}")
end
it 'returns snippet raw_url attribute' do
- expect(subject[:raw_url]).to match("#{snippet.project.full_path}/snippets/#{snippet.id}/raw")
+ expect(subject[:raw_url]).to match("#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw")
end
end
end
diff --git a/spec/lib/api/helpers/merge_requests_helpers_spec.rb b/spec/lib/api/helpers/merge_requests_helpers_spec.rb
new file mode 100644
index 00000000000..1d68b7985f1
--- /dev/null
+++ b/spec/lib/api/helpers/merge_requests_helpers_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::MergeRequestsHelpers do
+ describe '#handle_merge_request_errors!' do
+ let(:helper) do
+ Class.new do
+ include API::Helpers::MergeRequestsHelpers
+ end.new
+ end
+
+ let(:merge_request) { double }
+
+ context 'when merge request is valid' do
+ it 'returns nil' do
+ allow(merge_request).to receive(:valid?).and_return(true)
+
+ expect(merge_request).not_to receive(:errors)
+
+ helper.handle_merge_request_errors!(merge_request)
+ end
+ end
+
+ context 'when merge request is invalid' do
+ before do
+ allow(merge_request).to receive(:valid?).and_return(false)
+ allow(helper).to receive_messages([
+ :unprocessable_entity!, :conflict!, :render_validation_error!
+ ])
+ end
+
+ API::Helpers::MergeRequestsHelpers::UNPROCESSABLE_ERROR_KEYS.each do |error_key|
+ it "responds to a #{error_key} error with unprocessable_entity" do
+ error = double
+ allow(merge_request).to receive(:errors).and_return({ error_key => error })
+
+ expect(helper).to receive(:unprocessable_entity!).with(error)
+
+ helper.handle_merge_request_errors!(merge_request)
+ end
+ end
+
+ it 'responds to a validate_branches error with conflict' do
+ error = double
+ allow(merge_request).to receive(:errors).and_return({ validate_branches: error })
+
+ expect(helper).to receive(:conflict!).with(error)
+
+ helper.handle_merge_request_errors!(merge_request)
+ end
+
+ it 'responds with bad request' do
+ error = double
+ allow(merge_request).to receive(:errors).and_return({ other_error: error })
+
+ expect(helper).to receive(:render_validation_error!).with(merge_request)
+
+ helper.handle_merge_request_errors!(merge_request)
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb b/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb
index 80be5f7d10a..832f4abe545 100644
--- a/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb
@@ -8,40 +8,6 @@ RSpec.describe API::Helpers::PackagesManagerClientsHelpers do
let_it_be(:helper) { Class.new.include(described_class).new }
let(:password) { personal_access_token.token }
- describe '#find_personal_access_token_from_http_basic_auth' do
- let(:headers) { { Authorization: basic_http_auth(username, password) } }
-
- subject { helper.find_personal_access_token_from_http_basic_auth }
-
- before do
- allow(helper).to receive(:headers).and_return(headers&.with_indifferent_access)
- end
-
- context 'with a valid Authorization header' do
- it { is_expected.to eq personal_access_token }
- end
-
- context 'with an invalid Authorization header' do
- where(:headers) do
- [
- [{ Authorization: 'Invalid' }],
- [{}],
- [nil]
- ]
- end
-
- with_them do
- it { is_expected.to be nil }
- end
- end
-
- context 'with an unknown Authorization header' do
- let(:password) { 'Unknown' }
-
- it { is_expected.to be nil }
- end
- end
-
describe '#find_job_from_http_basic_auth' do
let_it_be(:user) { personal_access_token.user }
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 8cba1e0794a..d0fe9163c6e 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -230,4 +230,90 @@ RSpec.describe API::Helpers do
end
end
end
+
+ describe "#destroy_conditionally!" do
+ let!(:project) { create(:project) }
+
+ context 'when unmodified check passes' do
+ before do
+ allow(subject).to receive(:check_unmodified_since!).with(project.updated_at).and_return(true)
+ end
+
+ it 'destroys given project' do
+ allow(subject).to receive(:status).with(204)
+ allow(subject).to receive(:body).with(false)
+ expect(project).to receive(:destroy).and_call_original
+
+ expect { subject.destroy_conditionally!(project) }.to change(Project, :count).by(-1)
+ end
+ end
+
+ context 'when unmodified check fails' do
+ before do
+ allow(subject).to receive(:check_unmodified_since!).with(project.updated_at).and_throw(:error)
+ end
+
+ # #destroy_conditionally! uses Grape's error helpers, which throw a Ruby symbol and hand control to Grape's error middleware.
+ # Since this spec isn't run in a Grape context, we need to simulate that behaviour ourselves.
+ # Grape throws here: https://github.com/ruby-grape/grape/blob/470f80cd48933cdf11d4c1ee02cb43e0f51a7300/lib/grape/dsl/inside_route.rb#L168-L171
+ # And catches here: https://github.com/ruby-grape/grape/blob/cf57d250c3d77a9a488d9f56918d62fd4ac745ff/lib/grape/middleware/error.rb#L38-L40
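+ # Illustrative sketch only (not part of this spec): plain Ruby throw/catch behaves like
+ #   catch(:error) { throw :error, { message: 'failure' } } # => { message: 'failure' }
+ # which is why the example below asserts the symbol with throw_symbol(:error).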
+ it 'does not destroy given project' do
+ expect(project).not_to receive(:destroy)
+
+ expect { subject.destroy_conditionally!(project) }.to throw_symbol(:error).and change { Project.count }.by(0)
+ end
+ end
+ end
+
+ describe "#check_unmodified_since!" do
+ let(:unmodified_since_header) { Time.now.change(usec: 0) }
+
+ before do
+ allow(subject).to receive(:headers).and_return('If-Unmodified-Since' => unmodified_since_header.to_s)
+ end
+
+ context 'when last modified is later than header value' do
+ it 'renders error' do
+ expect(subject).to receive(:render_api_error!)
+
+ subject.check_unmodified_since!(unmodified_since_header + 1.hour)
+ end
+ end
+
+ context 'when last modified is earlier than header value' do
+ it 'does not render error' do
+ expect(subject).not_to receive(:render_api_error!)
+
+ subject.check_unmodified_since!(unmodified_since_header - 1.hour)
+ end
+ end
+
+ context 'when last modified is equal to header value' do
+ it 'does not render error' do
+ expect(subject).not_to receive(:render_api_error!)
+
+ subject.check_unmodified_since!(unmodified_since_header)
+ end
+ end
+
+ context 'when there is no header value present' do
+ let(:unmodified_since_header) { nil }
+
+ it 'does not render error' do
+ expect(subject).not_to receive(:render_api_error!)
+
+ subject.check_unmodified_since!(Time.now)
+ end
+ end
+
+ context 'when header value is not a valid time value' do
+ let(:unmodified_since_header) { "abcd" }
+
+ it 'does not render error' do
+ expect(subject).not_to receive(:render_api_error!)
+
+ subject.check_unmodified_since!(Time.now)
+ end
+ end
+ end
end
diff --git a/spec/lib/api/support/git_access_actor_spec.rb b/spec/lib/api/support/git_access_actor_spec.rb
index 70753856419..143cc6e56ee 100644
--- a/spec/lib/api/support/git_access_actor_spec.rb
+++ b/spec/lib/api/support/git_access_actor_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe API::Support::GitAccessActor do
describe 'attributes' do
describe '#user' do
context 'when initialized with a User' do
- let(:user) { create(:user) }
+ let(:user) { build(:user) }
it 'returns the User' do
expect(subject.user).to eq(user)
@@ -44,7 +44,7 @@ RSpec.describe API::Support::GitAccessActor do
end
context 'when initialized with a Key' do
- let(:user_for_key) { create(:user) }
+ let(:user_for_key) { build(:user) }
let(:key) { create(:key, user: user_for_key) }
it 'returns the User associated to the Key' do
@@ -85,7 +85,7 @@ RSpec.describe API::Support::GitAccessActor do
describe '#username' do
context 'when initialized with a User' do
- let(:user) { create(:user) }
+ let(:user) { build(:user) }
it 'returns the username' do
expect(subject.username).to eq(user.username)
@@ -104,7 +104,7 @@ RSpec.describe API::Support::GitAccessActor do
end
context 'that has a User associated' do
- let(:user_for_key) { create(:user) }
+ let(:user_for_key) { build(:user) }
it 'returns the username of the User associated to the Key' do
expect(subject.username).to eq(user_for_key.username)
@@ -113,9 +113,47 @@ RSpec.describe API::Support::GitAccessActor do
end
end
+ describe '#key_details' do
+ context 'when initialized with a User' do
+ let(:user) { build(:user) }
+
+ it 'returns an empty Hash' do
+ expect(subject.key_details).to eq({})
+ end
+ end
+
+ context 'when initialized with a Key' do
+ let(:key) { create(:key, user: user_for_key) }
+
+ context 'that has no User associated' do
+ let(:user_for_key) { nil }
+
+ it 'returns a Hash' do
+ expect(subject.key_details).to eq({ gl_key_type: 'key', gl_key_id: key.id })
+ end
+ end
+
+ context 'that has a User associated' do
+ let(:user_for_key) { build(:user) }
+
+ it 'returns a Hash' do
+ expect(subject.key_details).to eq({ gl_key_type: 'key', gl_key_id: key.id })
+ end
+ end
+ end
+
+ context 'when initialized with a DeployKey' do
+ let(:key) { create(:deploy_key) }
+
+ it 'returns a Hash' do
+ expect(subject.key_details).to eq({ gl_key_type: 'deploy_key', gl_key_id: key.id })
+ end
+ end
+ end
+
describe '#update_last_used_at!' do
context 'when initialized with a User' do
- let(:user) { create(:user) }
+ let(:user) { build(:user) }
it 'does nothing' do
expect(user).not_to receive(:update_last_used_at)
diff --git a/spec/lib/api/validations/validators/file_path_spec.rb b/spec/lib/api/validations/validators/file_path_spec.rb
index 2c79260b8d5..cbeada6faa1 100644
--- a/spec/lib/api/validations/validators/file_path_spec.rb
+++ b/spec/lib/api/validations/validators/file_path_spec.rb
@@ -6,31 +6,64 @@ RSpec.describe API::Validations::Validators::FilePath do
include ApiValidatorsHelpers
subject do
- described_class.new(['test'], {}, false, scope.new)
+ described_class.new(['test'], params, false, scope.new)
end
- context 'valid file path' do
- it 'does not raise a validation error' do
- expect_no_validation_error('test' => './foo')
- expect_no_validation_error('test' => './bar.rb')
- expect_no_validation_error('test' => 'foo%2Fbar%2Fnew%2Ffile.rb')
- expect_no_validation_error('test' => 'foo%2Fbar%2Fnew')
- expect_no_validation_error('test' => 'foo%252Fbar%252Fnew%252Ffile.rb')
+ context 'when allowlist is not set' do
+ shared_examples 'file validation' do
+ context 'valid file path' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => './foo')
+ expect_no_validation_error('test' => './bar.rb')
+ expect_no_validation_error('test' => 'foo%2Fbar%2Fnew%2Ffile.rb')
+ expect_no_validation_error('test' => 'foo%2Fbar%2Fnew')
+ expect_no_validation_error('test' => 'foo/bar')
+ end
+ end
+
+ context 'invalid file path' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => '../foo')
+ expect_validation_error('test' => '../')
+ expect_validation_error('test' => 'foo/../../bar')
+ expect_validation_error('test' => 'foo/../')
+ expect_validation_error('test' => 'foo/..')
+ expect_validation_error('test' => '../')
+ expect_validation_error('test' => '..\\')
+ expect_validation_error('test' => '..\/')
+ expect_validation_error('test' => '%2e%2e%2f')
+ expect_validation_error('test' => '/etc/passwd')
+ expect_validation_error('test' => 'test%0a/etc/passwd')
+ expect_validation_error('test' => '%2Ffoo%2Fbar%2Fnew%2Ffile.rb')
+ expect_validation_error('test' => '%252Ffoo%252Fbar%252Fnew%252Ffile.rb')
+ expect_validation_error('test' => 'foo%252Fbar%252Fnew%252Ffile.rb')
+ expect_validation_error('test' => 'foo%25252Fbar%25252Fnew%25252Ffile.rb')
+ end
+ end
+ end
+
+ it_behaves_like 'file validation' do
+ let(:params) { {} }
+ end
+
+ it_behaves_like 'file validation' do
+ let(:params) { true }
end
end
- context 'invalid file path' do
- it 'raise a validation error' do
- expect_validation_error('test' => '../foo')
- expect_validation_error('test' => '../')
- expect_validation_error('test' => 'foo/../../bar')
- expect_validation_error('test' => 'foo/../')
- expect_validation_error('test' => 'foo/..')
- expect_validation_error('test' => '../')
- expect_validation_error('test' => '..\\')
- expect_validation_error('test' => '..\/')
- expect_validation_error('test' => '%2e%2e%2f')
- expect_validation_error('test' => '/etc/passwd')
+ context 'when allowlist is set' do
+ let(:params) { { allowlist: ['/home/bar'] } }
+
+ context 'when file path is included in the allowlist' do
+ it 'does not raise a validation error' do
+ expect_no_validation_error('test' => '/home/bar')
+ end
+ end
+
+ context 'when file path is not included in the allowlist' do
+ it 'raises a validation error' do
+ expect_validation_error('test' => '/foo/xyz')
+ end
end
end
end
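
The reworked examples above exercise the validator both without options (or with `true`) and with an `allowlist`. For orientation, a Grape `params` block using this validator could look roughly like the sketch below. The endpoint, parameter names and allowlisted path are hypothetical, and it assumes the validator is registered under the `file_path` key following Grape's naming convention; it is not code from this patch.

require 'grape'

class FilePathUsageSketch < Grape::API
  params do
    # No allowlist: the value only has to stay inside its base directory
    # (rejects '../', absolute paths and encoded traversal attempts).
    requires :file_path, type: String, file_path: true

    # With an allowlist: only the listed absolute paths are accepted.
    optional :log_path, type: String, file_path: { allowlist: %w[/var/log/example] }
  end

  get '/sketch' do
    { file_path: params[:file_path] }
  end
end
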
diff --git a/spec/lib/backup/repository_spec.rb b/spec/lib/backup/repository_spec.rb
index c073a45bf68..fef5e018231 100644
--- a/spec/lib/backup/repository_spec.rb
+++ b/spec/lib/backup/repository_spec.rb
@@ -3,8 +3,9 @@
require 'spec_helper'
RSpec.describe Backup::Repository do
+ let_it_be(:project) { create(:project, :wiki_repo) }
+
let(:progress) { StringIO.new }
- let!(:project) { create(:project, :wiki_repo) }
subject { described_class.new(progress) }
@@ -19,13 +20,88 @@ RSpec.describe Backup::Repository do
end
describe '#dump' do
- describe 'repo failure' do
- before do
- allow(Gitlab::Popen).to receive(:popen).and_return(['normal output', 0])
+ before do
+ allow(Gitlab.config.repositories.storages).to receive(:keys).and_return(storage_keys)
+ end
+
+ let_it_be(:projects) { create_list(:project, 5, :wiki_repo) + [project] }
+
+ let(:storage_keys) { %w[default test_second_storage] }
+
+ context 'no concurrency' do
+ it 'creates the expected number of threads' do
+ expect(Thread).not_to receive(:new)
+
+ projects.each do |project|
+ expect(subject).to receive(:dump_project).with(project).and_call_original
+ end
+
+ subject.dump(max_concurrency: 1, max_storage_concurrency: 1)
end
- it 'does not raise error' do
- expect { subject.dump }.not_to raise_error
+ describe 'command failure' do
+ it 'dump_project raises an error' do
+ allow(subject).to receive(:dump_project).and_raise(IOError)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(IOError)
+ end
+
+ it 'project query raises an error' do
+ allow(Project).to receive(:find_each).and_raise(ActiveRecord::StatementTimeout)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: 1) }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+ end
+ end
+
+ [4, 10].each do |max_storage_concurrency|
+ context "max_storage_concurrency #{max_storage_concurrency}" do
+ it 'creates the expected number of threads' do
+ expect(Thread).to receive(:new)
+ .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
+ .and_call_original
+
+ projects.each do |project|
+ expect(subject).to receive(:dump_project).with(project).and_call_original
+ end
+
+ subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency)
+ end
+
+ it 'creates the expected number of threads with extra max concurrency' do
+ expect(Thread).to receive(:new)
+ .exactly(storage_keys.length * (max_storage_concurrency + 1)).times
+ .and_call_original
+
+ projects.each do |project|
+ expect(subject).to receive(:dump_project).with(project).and_call_original
+ end
+
+ subject.dump(max_concurrency: 3, max_storage_concurrency: max_storage_concurrency)
+ end
+
+ describe 'command failure' do
+ it 'dump_project raises an error' do
+ allow(subject).to receive(:dump_project)
+ .and_raise(IOError)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(IOError)
+ end
+
+ it 'project query raises an error' do
+ allow(Project).to receive_message_chain('for_repository_storage.find_each').and_raise(ActiveRecord::StatementTimeout)
+
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(ActiveRecord::StatementTimeout)
+ end
+
+ context 'misconfigured storages' do
+ let(:storage_keys) { %w[test_second_storage] }
+
+ it 'raises an error' do
+ expect { subject.dump(max_concurrency: 1, max_storage_concurrency: max_storage_concurrency) }.to raise_error(Backup::Error, 'repositories.storages in gitlab.yml is misconfigured')
+ end
+ end
+ end
end
end
end
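
The new examples pin down a thread-count contract for `#dump`: with `max_storage_concurrency: 1` no threads are spawned, otherwise `Thread.new` is called `storage_keys.length * (max_storage_concurrency + 1)` times, and errors from `dump_project` or the project query propagate to the caller. The sketch below is one plausible shape that satisfies those assertions; it is an assumption rather than the code from this patch, and it omits the `max_concurrency` cap and the misconfigured-storage check the spec also covers.

# Assumed skeleton: one producer thread plus `max_storage_concurrency` worker
# threads per configured storage, hence storages * (max_storage_concurrency + 1)
# calls to Thread.new.
def dump(max_concurrency:, max_storage_concurrency:)
  if max_storage_concurrency <= 1
    Project.find_each { |project| dump_project(project) }
    return
  end

  Gitlab.config.repositories.storages.keys.each do |storage|
    queue = SizedQueue.new(1)

    producer = Thread.new do
      Project.for_repository_storage(storage).find_each { |project| queue << project }
      queue.close
    end

    workers = Array.new(max_storage_concurrency) do
      Thread.new do
        while (project = queue.pop)
          dump_project(project)
        end
      end
    end

    ([producer] + workers).each(&:join) # Thread#join re-raises errors from the thread
  end
end
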
diff --git a/spec/lib/banzai/filter/absolute_link_filter_spec.rb b/spec/lib/banzai/filter/absolute_link_filter_spec.rb
index 2cb70850dca..0c159e8bac8 100644
--- a/spec/lib/banzai/filter/absolute_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/absolute_link_filter_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Banzai::Filter::AbsoluteLinkFilter do
let(:only_path_context) do
{ only_path: false }
end
+
let(:fake_url) { 'http://www.example.com' }
before do
diff --git a/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb b/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb
index 334d5c59828..7af22ea7db1 100644
--- a/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb
+++ b/spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb
@@ -10,6 +10,12 @@ RSpec.describe Banzai::Filter::AsciiDocPostProcessingFilter do
expect(result).to eq('<pre data-math-style="inline" class="code math js-render-math">some code</pre><div data-math>and</div>')
end
+ it "adds class for elements with data-mermaid-style" do
+ result = filter('<pre data-mermaid-style="display">some code</pre>').to_html
+
+ expect(result).to eq('<pre data-mermaid-style="display" class="js-render-mermaid">some code</pre>')
+ end
+
it "keeps content when no data-math-style found" do
result = filter('<pre>some code</pre><div data-math>and</div>').to_html
expect(result).to eq('<pre>some code</pre><div data-math>and</div>')
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index 2576dd1bf07..f39b5280490 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -6,8 +6,7 @@ RSpec.describe Banzai::Filter::GollumTagsFilter do
include FilterSpecHelper
let(:project) { create(:project) }
- let(:user) { double }
- let(:wiki) { ProjectWiki.new(project, user) }
+ let(:wiki) { ProjectWiki.new(project, nil) }
describe 'validation' do
it 'ensure that a :wiki key exists in context' do
diff --git a/spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb b/spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb
new file mode 100644
index 00000000000..be40195f001
--- /dev/null
+++ b/spec/lib/banzai/filter/inline_alert_metrics_filter_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::InlineAlertMetricsFilter do
+ include FilterSpecHelper
+
+ let(:params) { ['foo', 'bar', 12] }
+ let(:query_params) { {} }
+
+ let(:trigger_url) { urls.metrics_dashboard_namespace_project_prometheus_alert_url(*params, query_params) }
+ let(:dashboard_url) { urls.metrics_dashboard_namespace_project_prometheus_alert_url(*params, **query_params, embedded: true, format: :json) }
+
+ it_behaves_like 'a metrics embed filter'
+
+ context 'with query params specified' do
+ let(:query_params) { { timestamp: 'yesterday' } }
+
+ it_behaves_like 'a metrics embed filter'
+ end
+end
diff --git a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
index cafcaef8ae2..5f66844f498 100644
--- a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
@@ -74,5 +74,20 @@ RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
end
end
end
+
+ context 'for an alert embed' do
+ let_it_be(:alert) { create(:prometheus_alert, project: project) }
+ let(:url) do
+ urls.metrics_dashboard_project_prometheus_alert_url(
+ project,
+ alert.prometheus_metric_id,
+ environment_id: alert.environment_id,
+ embedded: true
+ )
+ end
+
+ it_behaves_like 'redacts the embed placeholder'
+ it_behaves_like 'retains the embed placeholder when applicable'
+ end
end
end
diff --git a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/label_reference_filter_spec.rb
index dadf98d9b76..726ef8c57ab 100644
--- a/spec/lib/banzai/filter/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/label_reference_filter_spec.rb
@@ -31,6 +31,19 @@ RSpec.describe Banzai::Filter::LabelReferenceFilter do
expect(doc.css('a').first.attr('class')).to eq 'gfm gfm-label has-tooltip gl-link gl-label-link'
end
+ it 'avoids N+1 cached queries', :use_sql_query_cache, :request_store do
+ # Run this once to establish a baseline
+ reference_filter("Label #{reference}")
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ reference_filter("Label #{reference}")
+ end
+
+ labels_markdown = Array.new(10, "Label #{reference}").join('\n')
+
+ expect { reference_filter(labels_markdown) }.not_to exceed_all_query_limit(control_count.count)
+ end
+
it 'includes a data-project attribute' do
doc = reference_filter("Label #{reference}")
link = doc.css('a').first
diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
index f24fcf98b1f..df78a3321ba 100644
--- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
@@ -220,6 +220,7 @@ RSpec.describe Banzai::Filter::MergeRequestReferenceFilter do
let(:reference) do
urls.project_merge_request_url(mr.project, mr) + "/diffs?commit_id=#{mr.diff_head_sha}"
end
+
let(:commit) { mr.commits.find { |commit| commit.sha == mr.diff_head_sha } }
it 'links to a valid reference' do
diff --git a/spec/lib/banzai/filter/reference_filter_spec.rb b/spec/lib/banzai/filter/reference_filter_spec.rb
index d5978db13c0..2888965dbc4 100644
--- a/spec/lib/banzai/filter/reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/reference_filter_spec.rb
@@ -110,20 +110,6 @@ RSpec.describe Banzai::Filter::ReferenceFilter do
expect(filter.instance_variable_get(:@new_nodes)).to eq({ index => [filter.each_node.to_a[index]] })
end
-
- context "with update_nodes_for_banzai_reference_filter feature flag disabled" do
- before do
- stub_feature_flags(update_nodes_for_banzai_reference_filter: false)
- end
-
- it 'does not call replace_and_update_new_nodes' do
- expect(filter).not_to receive(:replace_and_update_new_nodes).with(filter.nodes[index], index, html)
-
- filter.send(method_name, *args) do
- html
- end
- end
- end
end
end
@@ -198,49 +184,20 @@ RSpec.describe Banzai::Filter::ReferenceFilter do
end
describe "#call_and_update_nodes" do
- context "with update_nodes_for_banzai_reference_filter feature flag enabled" do
- include_context 'new nodes'
- let(:document) { Nokogiri::HTML.fragment('<a href="foo">foo</a>') }
- let(:filter) { described_class.new(document, project: project) }
-
- before do
- stub_feature_flags(update_nodes_for_banzai_reference_filter: true)
- end
-
- it "updates all new nodes", :aggregate_failures do
- filter.instance_variable_set('@nodes', nodes)
-
- expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
- expect(filter).to receive(:with_update_nodes).and_call_original
- expect(filter).to receive(:update_nodes!).and_call_original
-
- filter.call_and_update_nodes
-
- expect(filter.result[:reference_filter_nodes]).to eq(expected_nodes)
- end
- end
-
- context "with update_nodes_for_banzai_reference_filter feature flag disabled" do
- include_context 'new nodes'
-
- before do
- stub_feature_flags(update_nodes_for_banzai_reference_filter: false)
- end
+ include_context 'new nodes'
+ let(:document) { Nokogiri::HTML.fragment('<a href="foo">foo</a>') }
+ let(:filter) { described_class.new(document, project: project) }
- it "does not change nodes", :aggregate_failures do
- document = Nokogiri::HTML.fragment('<a href="foo">foo</a>')
- filter = described_class.new(document, project: project)
- filter.instance_variable_set('@nodes', nodes)
+ it "updates all new nodes", :aggregate_failures do
+ filter.instance_variable_set('@nodes', nodes)
- expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
- expect(filter).not_to receive(:with_update_nodes)
- expect(filter).not_to receive(:update_nodes!)
+ expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
+ expect(filter).to receive(:with_update_nodes).and_call_original
+ expect(filter).to receive(:update_nodes!).and_call_original
- filter.call_and_update_nodes
+ filter.call_and_update_nodes
- expect(filter.nodes).to eq(nodes)
- expect(filter.result[:reference_filter_nodes]).to be nil
- end
+ expect(filter.result[:reference_filter_nodes]).to eq(expected_nodes)
end
end
@@ -251,10 +208,6 @@ RSpec.describe Banzai::Filter::ReferenceFilter do
let(:result) { { reference_filter_nodes: nodes } }
- before do
- stub_feature_flags(update_nodes_for_banzai_reference_filter: true)
- end
-
it "updates all nodes", :aggregate_failures do
expect_next_instance_of(described_class) do |filter|
expect(filter).to receive(:call_and_update_nodes).and_call_original
@@ -267,26 +220,5 @@ RSpec.describe Banzai::Filter::ReferenceFilter do
expect(result[:reference_filter_nodes]).to eq(expected_nodes)
end
-
- context "with update_nodes_for_banzai_reference_filter feature flag disabled" do
- let(:result) { {} }
-
- before do
- stub_feature_flags(update_nodes_for_banzai_reference_filter: false)
- end
-
- it "updates all nodes", :aggregate_failures do
- expect_next_instance_of(described_class) do |filter|
- expect(filter).to receive(:call_and_update_nodes).and_call_original
- expect(filter).not_to receive(:with_update_nodes)
- expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
- expect(filter).not_to receive(:update_nodes!)
- end
-
- described_class.call(document, { project: project }, result)
-
- expect(result[:reference_filter_nodes]).to be nil
- end
- end
end
end
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index a2875fad421..78f84ee44f7 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -26,6 +26,14 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
include_examples "XSS prevention", ""
end
+ context "when contains mermaid diagrams" do
+ it "ignores mermaid blocks" do
+ result = filter('<pre data-mermaid-style="display"><code>mermaid code</code></pre>')
+
+ expect(result.to_html).to eq('<pre data-mermaid-style="display"><code>mermaid code</code></pre>')
+ end
+ end
+
context "when a valid language is specified" do
it "highlights as that language" do
result = filter('<pre><code lang="ruby">def fun end</code></pre>')
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index 7a4464a2604..d1f6ee49260 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -7,8 +7,7 @@ RSpec.describe Banzai::Filter::WikiLinkFilter do
let(:namespace) { build_stubbed(:namespace, name: "wiki_link_ns") }
let(:project) { build_stubbed(:project, :public, name: "wiki_link_project", namespace: namespace) }
- let(:user) { double }
- let(:wiki) { ProjectWiki.new(project, user) }
+ let(:wiki) { ProjectWiki.new(project, nil) }
let(:repository_upload_folder) { Wikis::CreateAttachmentService::ATTACHMENT_PATH }
it "doesn't rewrite absolute links" do
diff --git a/spec/lib/banzai/issuable_extractor_spec.rb b/spec/lib/banzai/issuable_extractor_spec.rb
index c4ee7160e12..8fec9691d7f 100644
--- a/spec/lib/banzai/issuable_extractor_spec.rb
+++ b/spec/lib/banzai/issuable_extractor_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Banzai::IssuableExtractor do
"<a href='' data-issue='#{issue.id}' data-reference-type='issue' class='gfm'>text</a>"
)
end
+
let(:merge_request_link) do
html_to_node(
"<a href='' data-merge-request='#{merge_request.id}' data-reference-type='merge_request' class='gfm'>text</a>"
diff --git a/spec/lib/banzai/object_renderer_spec.rb b/spec/lib/banzai/object_renderer_spec.rb
index f8d7acd3148..e64ab5dfce3 100644
--- a/spec/lib/banzai/object_renderer_spec.rb
+++ b/spec/lib/banzai/object_renderer_spec.rb
@@ -73,6 +73,7 @@ RSpec.describe Banzai::ObjectRenderer do
end
end
end
+
let(:cacheless_thing) do
cacheless_class.new.tap do |thing|
thing.title = "Merge branch 'branch-merged' into 'master'"
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 0127ac11c81..9391ca386cf 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -109,6 +109,7 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
# Header
MARKDOWN
end
+
let(:invalid_markdown) do
<<-MARKDOWN.strip_heredoc
test [[_TOC_]]
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index beb760637b0..247f4591632 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -30,34 +30,6 @@ RSpec.describe Banzai::Pipeline::GfmPipeline do
described_class.call(markdown, project: project)
end
- context "with update_nodes_for_banzai_reference_filter feature flag disabled" do
- before do
- stub_feature_flags(update_nodes_for_banzai_reference_filter: false)
- end
-
- context 'when shorthand pattern #ISSUE_ID is used' do
- it 'links an internal issues and doesnt store nodes in result[:reference_filter_nodes]', :aggregate_failures do
- issue = create(:issue, project: project)
- markdown = "text #{issue.to_reference(project, full: true)}"
- result = described_class.call(markdown, project: project)
- link = result[:output].css('a').first
-
- expect(link['href']).to eq(Gitlab::Routing.url_helpers.project_issue_path(project, issue))
- expect(result[:reference_filter_nodes]).to eq nil
- end
- end
-
- it 'execute :each_node for each reference_filter', :aggregate_failures do
- issue = create(:issue, project: project)
- markdown = "text #{issue.to_reference(project, full: true)}"
- described_class.reference_filters do |reference_filter|
- expect_any_instance_of(reference_filter).to receive(:each_node).once
- end
-
- described_class.call(markdown, project: project)
- end
- end
-
context 'when shorthand pattern #ISSUE_ID is used' do
it 'links an internal issue if it exists' do
issue = create(:issue, project: project)
diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
index 4af782c7d73..b102de24041 100644
--- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Banzai::Pipeline::WikiPipeline do
let_it_be(:namespace) { create(:namespace, name: "wiki_link_ns") }
let_it_be(:project) { create(:project, :public, name: "wiki_link_project", namespace: namespace) }
- let_it_be(:wiki) { ProjectWiki.new(project, double(:user)) }
+ let_it_be(:wiki) { ProjectWiki.new(project, nil) }
let_it_be(:page) { build(:wiki_page, wiki: wiki, title: 'nested/twice/start-page') }
describe 'TableOfContents' do
diff --git a/spec/lib/banzai/reference_parser/base_parser_spec.rb b/spec/lib/banzai/reference_parser/base_parser_spec.rb
index 0eea51262ba..5ab76b2c68b 100644
--- a/spec/lib/banzai/reference_parser/base_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/base_parser_spec.rb
@@ -8,13 +8,14 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:context) { Banzai::RenderContext.new(project, user) }
-
- subject do
- klass = Class.new(described_class) do
+ let(:parser_class) do
+ Class.new(described_class) do
self.reference_type = :foo
end
+ end
- klass.new(context)
+ subject do
+ parser_class.new(context)
end
describe '.reference_type=' do
@@ -43,12 +44,20 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
let(:link) { empty_html_link }
context 'when the link has a data-project attribute' do
- it 'checks if user can read the resource' do
+ before do
link['data-project'] = project.id.to_s
+ end
- expect(subject).to receive(:can_read_reference?).with(user, project, link)
+ it 'includes the link if can_read_reference? returns true' do
+ expect(subject).to receive(:can_read_reference?).with(user, project, link).and_return(true)
- subject.nodes_visible_to_user(user, [link])
+ expect(subject.nodes_visible_to_user(user, [link])).to contain_exactly(link)
+ end
+
+ it 'excludes the link if can_read_reference? returns false' do
+ expect(subject).to receive(:can_read_reference?).with(user, project, link).and_return(false)
+
+ expect(subject.nodes_visible_to_user(user, [link])).to be_empty
end
end
@@ -178,58 +187,56 @@ RSpec.describe Banzai::ReferenceParser::BaseParser do
it 'gathers the references for every node matching the reference type' do
dummy = Class.new(described_class) do
self.reference_type = :test
- end
-
- instance = dummy.new(Banzai::RenderContext.new(project, user))
- document = Nokogiri::HTML.fragment('<a class="gfm"></a><a class="gfm" data-reference-type="test"></a>')
- expect(instance).to receive(:gather_references)
- .with([document.children[1]])
- .and_return([user])
+ def gather_references(nodes)
+ nodes
+ end
+ end
- expect(instance.process([document])).to eq([user])
+ instance = dummy.new(context)
+ document_a = Nokogiri::HTML.fragment(<<-FRAG)
+ <a class="gfm">one</a>
+ <a class="gfm" data-reference-type="test">two</a>
+ <a class="gfm" data-reference-type="other">three</a>
+ FRAG
+ document_b = Nokogiri::HTML.fragment(<<-FRAG)
+ <a class="gfm" data-reference-type="test">four</a>
+ FRAG
+ document_c = Nokogiri::HTML.fragment('')
+
+ expect(instance.process([document_a, document_b, document_c]))
+ .to contain_exactly(document_a.css('a')[1], document_b.css('a')[0])
end
end
describe '#gather_references' do
- let(:link) { double(:link) }
+ let(:nodes) { (1..10).map { |n| double(:link, id: n) } }
- it 'does not process links a user can not reference' do
- expect(subject).to receive(:nodes_user_can_reference)
- .with(user, [link])
- .and_return([])
+ let(:parser_class) do
+ Class.new(described_class) do
+ def nodes_user_can_reference(_user, nodes)
+ nodes.select { |n| n.id.even? }
+ end
- expect(subject).to receive(:referenced_by).with([])
+ def nodes_visible_to_user(_user, nodes)
+ nodes.select { |n| n.id > 5 }
+ end
- subject.gather_references([link])
+ def referenced_by(nodes)
+ nodes.map(&:id)
+ end
+ end
end
- it 'does not process links a user can not see' do
- expect(subject).to receive(:nodes_user_can_reference)
- .with(user, [link])
- .and_return([link])
-
- expect(subject).to receive(:nodes_visible_to_user)
- .with(user, [link])
- .and_return([])
-
- expect(subject).to receive(:referenced_by).with([])
-
- subject.gather_references([link])
+ it 'returns referenceable and visible objects, alongside nodes that are referenceable but not visible' do
+ expect(subject.gather_references(nodes)).to match(
+ visible: contain_exactly(6, 8, 10),
+ not_visible: match_array(nodes.select { |n| n.id.even? && n.id <= 5 })
+ )
end
- it 'returns the references if a user can reference and see a link' do
- expect(subject).to receive(:nodes_user_can_reference)
- .with(user, [link])
- .and_return([link])
-
- expect(subject).to receive(:nodes_visible_to_user)
- .with(user, [link])
- .and_return([link])
-
- expect(subject).to receive(:referenced_by).with([link])
-
- subject.gather_references([link])
+ it 'is always empty if the input is empty' do
+ expect(subject.gather_references([])).to match(visible: be_empty, not_visible: be_empty)
end
end
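
The rewritten `#gather_references` examples describe a new return shape: a Hash with the rendered references for nodes the user can both reference and see under `:visible`, and the referenceable-but-hidden nodes under `:not_visible`. The self-contained sketch below mirrors the stub parser used in the spec; the body of `gather_references` is an assumed composition of the three hooks, not the actual BaseParser code.

class GatherReferencesSketch
  Node = Struct.new(:id)

  # Selection rules copied from the stub parser in the spec above
  def nodes_user_can_reference(_user, nodes)
    nodes.select { |node| node.id.even? }
  end

  def nodes_visible_to_user(_user, nodes)
    nodes.select { |node| node.id > 5 }
  end

  def referenced_by(nodes)
    nodes.map(&:id)
  end

  # Assumed composition matching the asserted return shape
  def gather_references(nodes, user: nil)
    referenceable = nodes_user_can_reference(user, nodes)
    visible = nodes_visible_to_user(user, referenceable)

    { visible: referenced_by(visible), not_visible: referenceable - visible }
  end
end

nodes = (1..10).map { |id| GatherReferencesSketch::Node.new(id) }
GatherReferencesSketch.new.gather_references(nodes)
# => { visible: [6, 8, 10], not_visible: [#<struct id=2>, #<struct id=4>] }
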
diff --git a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
index cdc660b4f4a..3459784708f 100644
--- a/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/snippet_parser_spec.rb
@@ -33,6 +33,17 @@ RSpec.describe Banzai::ReferenceParser::SnippetParser do
project.project_feature.update_attribute(:snippets_access_level, ProjectFeature::ENABLED)
end
+ it 'avoids N+1 cached queries', :use_sql_query_cache do
+ # Run this once to establish a baseline
+ visible_references(:public)
+
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
+ subject.nodes_visible_to_user(user, [link])
+ end
+
+ expect { subject.nodes_visible_to_user(user, Array.new(10, link)) }.not_to exceed_all_query_limit(control_count.count)
+ end
+
it 'creates a reference for guest for a public snippet' do
expect(visible_references(:public)).to eq([link])
end
diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb
index de92ca5eeec..aa947329c33 100644
--- a/spec/lib/container_registry/client_spec.rb
+++ b/spec/lib/container_registry/client_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe ContainerRegistry::Client do
'User-Agent' => "GitLab/#{Gitlab::VERSION}"
}
end
+
let(:headers_with_accept_types) do
{
'Accept' => 'application/vnd.docker.distribution.manifest.v2+json, application/vnd.oci.image.manifest.v1+json',
diff --git a/spec/lib/gitlab/alert_management/alert_params_spec.rb b/spec/lib/gitlab/alert_management/alert_params_spec.rb
index 393838ab042..1fe27365c83 100644
--- a/spec/lib/gitlab/alert_management/alert_params_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_params_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::AlertManagement::AlertParams do
'some' => { 'extra' => { 'payload' => 'here' } }
}
end
+
let(:payload) { default_payload }
subject { described_class.from_generic_alert(project: project, payload: payload) }
@@ -75,6 +76,7 @@ RSpec.describe Gitlab::AlertManagement::AlertParams do
'fingerprint' => 'b6ac4d42057c43c1'
}
end
+
let(:parsed_alert) { Gitlab::Alerting::Alert.new(project: project, payload: payload) }
subject { described_class.from_prometheus_alert(project: project, parsed_alert: parsed_alert) }
diff --git a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
index 4e471a8eac0..a2b8f0aa8d4 100644
--- a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::AlertStatusCounts do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:alert_1) { create(:alert_management_alert, :resolved, project: project) }
- let_it_be(:alert_2) { create(:alert_management_alert, :ignored, project: project) }
- let_it_be(:alert_3) { create(:alert_management_alert) }
+ let_it_be(:alert_resolved) { create(:alert_management_alert, :resolved, project: project) }
+ let_it_be(:alert_ignored) { create(:alert_management_alert, :ignored, project: project) }
+ let_it_be(:alert_triggered) { create(:alert_management_alert) }
let(:params) { {} }
describe '#execute' do
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::AlertManagement::AlertStatusCounts do
end
context 'when search param is included' do
- let(:params) { { search: alert_1.title } }
+ let(:params) { { search: alert_resolved.title } }
it 'returns the correct counts' do
expect(counts.open).to eq(0)
diff --git a/spec/lib/gitlab/alerting/alert_spec.rb b/spec/lib/gitlab/alerting/alert_spec.rb
index 9663e6af0d2..b53b71e3f3e 100644
--- a/spec/lib/gitlab/alerting/alert_spec.rb
+++ b/spec/lib/gitlab/alerting/alert_spec.rb
@@ -191,7 +191,7 @@ RSpec.describe Gitlab::Alerting::Alert do
end
context 'with payload' do
- let(:time) { Time.now.change(usec: 0) }
+ let(:time) { Time.current.change(usec: 0) }
before do
payload['startsAt'] = time.rfc3339
@@ -274,7 +274,7 @@ RSpec.describe Gitlab::Alerting::Alert do
before do
payload.update(
'annotations' => { 'title' => 'some title' },
- 'startsAt' => Time.now.rfc3339
+ 'startsAt' => Time.current.rfc3339
)
end
diff --git a/spec/lib/gitlab/analytics/unique_visits_spec.rb b/spec/lib/gitlab/analytics/unique_visits_spec.rb
index ff3623a3a71..1432c9ac58f 100644
--- a/spec/lib/gitlab/analytics/unique_visits_spec.rb
+++ b/spec/lib/gitlab/analytics/unique_visits_spec.rb
@@ -7,8 +7,11 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
let(:target1_id) { 'g_analytics_contribution' }
let(:target2_id) { 'g_analytics_insights' }
let(:target3_id) { 'g_analytics_issues' }
+ let(:target4_id) { 'g_compliance_dashboard' }
+ let(:target5_id) { 'i_compliance_credential_inventory' }
let(:visitor1_id) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
let(:visitor2_id) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
+ let(:visitor3_id) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
around do |example|
# We need to freeze to a reference time
@@ -29,24 +32,40 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
unique_visits.track_visit(visitor1_id, target2_id, 8.days.ago)
unique_visits.track_visit(visitor1_id, target2_id, 15.days.ago)
- expect(unique_visits.weekly_unique_visits_for_target(target1_id)).to eq(2)
- expect(unique_visits.weekly_unique_visits_for_target(target2_id)).to eq(1)
+ unique_visits.track_visit(visitor3_id, target4_id, 7.days.ago)
- expect(unique_visits.weekly_unique_visits_for_target(target2_id, week_of: 15.days.ago)).to eq(1)
+ unique_visits.track_visit(visitor3_id, target5_id, 15.days.ago)
+ unique_visits.track_visit(visitor2_id, target5_id, 15.days.ago)
- expect(unique_visits.weekly_unique_visits_for_target(target3_id)).to eq(0)
+ expect(unique_visits.unique_visits_for(targets: target1_id)).to eq(2)
+ expect(unique_visits.unique_visits_for(targets: target2_id)).to eq(1)
+ expect(unique_visits.unique_visits_for(targets: target4_id)).to eq(1)
- expect(unique_visits.weekly_unique_visits_for_any_target).to eq(2)
- expect(unique_visits.weekly_unique_visits_for_any_target(week_of: 15.days.ago)).to eq(1)
- expect(unique_visits.weekly_unique_visits_for_any_target(week_of: 30.days.ago)).to eq(0)
+ expect(unique_visits.unique_visits_for(targets: target2_id, start_date: 15.days.ago)).to eq(1)
+
+ expect(unique_visits.unique_visits_for(targets: target3_id)).to eq(0)
+
+ expect(unique_visits.unique_visits_for(targets: target5_id, start_date: 15.days.ago)).to eq(2)
+
+ expect(unique_visits.unique_visits_for(targets: :analytics)).to eq(2)
+ expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 15.days.ago)).to eq(1)
+ expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 30.days.ago)).to eq(0)
+
+ expect(unique_visits.unique_visits_for(targets: :analytics, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
+
+ expect(unique_visits.unique_visits_for(targets: :compliance)).to eq(1)
+ expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 15.days.ago)).to eq(2)
+ expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 30.days.ago)).to eq(0)
+
+ expect(unique_visits.unique_visits_for(targets: :compliance, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2)
end
- it 'sets the keys in Redis to expire automatically after 28 days' do
+ it 'sets the keys in Redis to expire automatically after 12 weeks' do
unique_visits.track_visit(visitor1_id, target1_id)
Gitlab::Redis::SharedState.with do |redis|
- redis.scan_each(match: "#{target1_id}-*").each do |key|
- expect(redis.ttl(key)).to be_within(5.seconds).of(28.days)
+ redis.scan_each(match: "{#{target1_id}}-*").each do |key|
+ expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
end
end
end
@@ -56,7 +75,7 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
expect do
unique_visits.track_visit(visitor1_id, invalid_target_id)
- end.to raise_error("Invalid target id #{invalid_target_id}")
+ end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
end
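
The API exercised above replaces the week-oriented readers with a single `unique_visits_for` method. A minimal usage sketch, reusing identifiers from the spec; the default date window and the category expansion (`:analytics`, `:compliance`) are inferred from the assertions rather than documented here.

unique_visits = Gitlab::Analytics::UniqueVisits.new

# Record a visit (the time argument can be omitted, as in the TTL example above)
unique_visits.track_visit('dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857', 'g_analytics_contribution')

# Count distinct visitors for a single event, or for a whole category of events
unique_visits.unique_visits_for(targets: 'g_analytics_contribution')
unique_visits.unique_visits_for(targets: :analytics, start_date: 4.weeks.ago, end_date: Date.current)
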
diff --git a/spec/lib/gitlab/app_logger_spec.rb b/spec/lib/gitlab/app_logger_spec.rb
index 166b1fda268..23bac444dbe 100644
--- a/spec/lib/gitlab/app_logger_spec.rb
+++ b/spec/lib/gitlab/app_logger_spec.rb
@@ -19,4 +19,12 @@ RSpec.describe Gitlab::AppLogger do
subject.info('Hello World!')
end
+
+ it 'logs info to only the AppJsonLogger when unstructured logs are disabled' do
+ stub_env('UNSTRUCTURED_RAILS_LOG', 'false')
+ expect_any_instance_of(Gitlab::AppTextLogger).not_to receive(:info).and_call_original
+ expect_any_instance_of(Gitlab::AppJsonLogger).to receive(:info).and_call_original
+
+ subject.info('Hello World!')
+ end
end
diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb
index 14a7e25a2e8..2525b1ce41e 100644
--- a/spec/lib/gitlab/application_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/application_rate_limiter_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_cache do
}
}
end
+
let(:key) { rate_limits.keys[0] }
subject { described_class }
diff --git a/spec/lib/gitlab/asciidoc/include_processor_spec.rb b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
index 067dcefb525..5c225575965 100644
--- a/spec/lib/gitlab/asciidoc/include_processor_spec.rb
+++ b/spec/lib/gitlab/asciidoc/include_processor_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::Asciidoc::IncludeProcessor do
ref: ref
}
end
+
let(:ref) { project.repository.root_ref }
let(:max_includes) { 10 }
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 40a4ab3e173..1b669e691e7 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -418,6 +418,50 @@ module Gitlab
expect(output).to include("a href=\"README.adoc\"")
end
end
+
+ context 'with mermaid diagrams' do
+ it 'adds class js-render-mermaid to the output' do
+ input = <<~MD
+ [mermaid]
+ ....
+ graph LR
+ A[Square Rect] -- Link text --> B((Circle))
+ A --> C(Round Rect)
+ B --> D{Rhombus}
+ C --> D
+ ....
+ MD
+
+ output = <<~HTML
+ <pre data-mermaid-style="display" class="js-render-mermaid">graph LR
+ A[Square Rect] -- Link text --&gt; B((Circle))
+ A --&gt; C(Round Rect)
+ B --&gt; D{Rhombus}
+ C --&gt; D</pre>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+
+ it 'applies subs in diagram block' do
+ input = <<~MD
+ :class-name: AveryLongClass
+
+ [mermaid,subs=+attributes]
+ ....
+ classDiagram
+ Class01 <|-- {class-name} : Cool
+ ....
+ MD
+
+ output = <<~HTML
+ <pre data-mermaid-style="display" class="js-render-mermaid">classDiagram
+ Class01 &lt;|-- AveryLongClass : Cool</pre>
+ HTML
+
+ expect(render(input, context)).to include(output.strip)
+ end
+ end
end
context 'with project' do
@@ -429,6 +473,7 @@ module Gitlab
requested_path: requested_path
}
end
+
let(:commit) { project.commit(ref) }
let(:project) { create(:project, :repository) }
let(:ref) { 'asciidoc' }
diff --git a/spec/lib/gitlab/audit/null_author_spec.rb b/spec/lib/gitlab/audit/null_author_spec.rb
new file mode 100644
index 00000000000..eb80e5faa89
--- /dev/null
+++ b/spec/lib/gitlab/audit/null_author_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::NullAuthor do
+ subject { described_class }
+
+ describe '.for' do
+ it 'returns a DeletedAuthor' do
+ expect(subject.for(666, 'Old Hat')).to be_a(Gitlab::Audit::DeletedAuthor)
+ end
+
+ it 'returns an UnauthenticatedAuthor when id equals -1', :aggregate_failures do
+ expect(subject.for(-1, 'Frank')).to be_a(Gitlab::Audit::UnauthenticatedAuthor)
+ expect(subject.for(-1, 'Frank')).to have_attributes(id: -1, name: 'Frank')
+ end
+ end
+
+ describe '#current_sign_in_ip' do
+ it { expect(subject.new(id: 888, name: 'Guest').current_sign_in_ip).to be_nil }
+ end
+end
diff --git a/spec/lib/gitlab/audit/unauthenticated_author_spec.rb b/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
new file mode 100644
index 00000000000..4e5c477fc2a
--- /dev/null
+++ b/spec/lib/gitlab/audit/unauthenticated_author_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::UnauthenticatedAuthor do
+ describe '#initialize' do
+ it 'sets correct attributes' do
+ expect(described_class.new(name: 'Peppa Pig'))
+ .to have_attributes(id: -1, name: 'Peppa Pig')
+ end
+
+ it 'sets default name when it is not provided' do
+ expect(described_class.new)
+ .to have_attributes(id: -1, name: 'An unauthenticated user')
+ end
+ end
+end
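
Taken together, the two new specs above fully determine the behaviour of these null-object authors: `.for` picks `UnauthenticatedAuthor` for id `-1` and `DeletedAuthor` otherwise, the unauthenticated author defaults its name, and `#current_sign_in_ip` is always nil. The classes below are an illustrative sketch consistent with those assertions, not the real `Gitlab::Audit` implementations, which may carry more behaviour.

class NullAuthorSketch
  attr_reader :id, :name

  def self.for(id, name)
    if id == -1
      UnauthenticatedAuthorSketch.new(name: name)
    else
      DeletedAuthorSketch.new(id: id, name: name)
    end
  end

  def initialize(id:, name:)
    @id = id
    @name = name
  end

  def current_sign_in_ip
    nil
  end
end

class DeletedAuthorSketch < NullAuthorSketch
end

class UnauthenticatedAuthorSketch < NullAuthorSketch
  def initialize(name: nil)
    super(id: -1, name: name || 'An unauthenticated user')
  end
end

NullAuthorSketch.for(-1, 'Frank')    # => UnauthenticatedAuthorSketch(id: -1, name: 'Frank')
NullAuthorSketch.for(666, 'Old Hat') # => DeletedAuthorSketch(id: 666, name: 'Old Hat')
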
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index d0f5d0a9b35..a73ac0b34af 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
'rack.input' => ''
}
end
+
let(:request) { ActionDispatch::Request.new(env) }
def set_param(key, value)
@@ -554,7 +555,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
context 'with CI username' do
- let(:username) { ::Ci::Build::CI_REGISTRY_USER }
+ let(:username) { ::Gitlab::Auth::CI_JOB_USER }
let(:user) { create(:user) }
let(:build) { create(:ci_build, user: user) }
@@ -727,7 +728,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
context 'when the job token is provided via basic auth' do
let(:route_authentication_setting) { { job_token_allowed: :basic_auth } }
- let(:username) { Ci::Build::CI_REGISTRY_USER }
+ let(:username) { ::Gitlab::Auth::CI_JOB_USER }
let(:token) { job.token }
before do
@@ -744,6 +745,56 @@ RSpec.describe Gitlab::Auth::AuthFinders do
end
end
+ describe '#cluster_agent_token_from_authorization_token' do
+ let_it_be(:agent_token) { create(:cluster_agent_token) }
+
+ context 'when route_setting is empty' do
+ it 'returns nil' do
+ expect(cluster_agent_token_from_authorization_token).to be_nil
+ end
+ end
+
+ context 'when route_setting allows cluster agent token' do
+ let(:route_authentication_setting) { { cluster_agent_token_allowed: true } }
+
+ context 'Authorization header is empty' do
+ it 'returns nil' do
+ expect(cluster_agent_token_from_authorization_token).to be_nil
+ end
+ end
+
+ context 'Authorization header is incorrect' do
+ before do
+ request.headers['Authorization'] = 'Bearer ABCD'
+ end
+
+ it 'returns nil' do
+ expect(cluster_agent_token_from_authorization_token).to be_nil
+ end
+ end
+
+ context 'Authorization header is malformed' do
+ before do
+ request.headers['Authorization'] = 'Bearer'
+ end
+
+ it 'returns nil' do
+ expect(cluster_agent_token_from_authorization_token).to be_nil
+ end
+ end
+
+ context 'Authorization header matches agent token' do
+ before do
+ request.headers['Authorization'] = "Bearer #{agent_token.token}"
+ end
+
+ it 'returns the agent token' do
+ expect(cluster_agent_token_from_authorization_token).to eq(agent_token)
+ end
+ end
+ end
+ end
+
describe '#find_runner_from_token' do
let(:runner) { create(:ci_runner) }
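
The new `#cluster_agent_token_from_authorization_token` examples pin down when the helper may return a token: only when the route setting allows cluster agent tokens and the `Authorization` header carries a well-formed bearer token that matches a stored agent token. The sketch below is an assumption about the shape of such a helper; in particular the `::Clusters::AgentToken.find_by_token` lookup is a guess at the API, not taken from this patch.

def cluster_agent_token_from_authorization_token
  return unless route_authentication_setting[:cluster_agent_token_allowed]

  authorization = request.headers['Authorization'].to_s
  type, token = authorization.split(' ', 2)
  return unless type == 'Bearer' && token.present?

  ::Clusters::AgentToken.find_by_token(token) # lookup method assumed
end
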
diff --git a/spec/lib/gitlab/auth/ldap/user_spec.rb b/spec/lib/gitlab/auth/ldap/user_spec.rb
index 7ca2878e583..ccaed94b5c8 100644
--- a/spec/lib/gitlab/auth/ldap/user_spec.rb
+++ b/spec/lib/gitlab/auth/ldap/user_spec.rb
@@ -14,9 +14,11 @@ RSpec.describe Gitlab::Auth::Ldap::User do
nickname: 'john'
}
end
+
let(:auth_hash) do
OmniAuth::AuthHash.new(uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain', info: info)
end
+
let(:ldap_user_upper_case) { described_class.new(auth_hash_upper_case) }
let(:info_upper_case) do
{
@@ -25,6 +27,7 @@ RSpec.describe Gitlab::Auth::Ldap::User do
nickname: 'john'
}
end
+
let(:auth_hash_upper_case) do
OmniAuth::AuthHash.new(uid: 'uid=John Smith,ou=People,dc=example,dc=com', provider: 'ldapmain', info: info_upper_case)
end
diff --git a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
index 7a60acca95b..67ffdee0c4a 100644
--- a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do
let(:uid_raw) do
+"CN=Onur K\xC3\xBC\xC3\xA7\xC3\xBCk,OU=Test,DC=example,DC=net"
end
+
let(:email_raw) { +"onur.k\xC3\xBC\xC3\xA7\xC3\xBCk_ABC-123@example.net" }
let(:nickname_raw) { +"ok\xC3\xBC\xC3\xA7\xC3\xBCk" }
let(:first_name_raw) { +'Onur' }
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index ad04fddc675..12e774ec1f8 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do
}
}
end
+
let(:ldap_user) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
describe '#persisted?' do
@@ -193,6 +194,43 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
end
+ context "with auto_link_user disabled (default)" do
+ before do
+ stub_omniauth_config(auto_link_user: false)
+ end
+
+ include_examples "to verify compliance with allow_single_sign_on"
+ end
+
+ context "with auto_link_user enabled" do
+ before do
+ stub_omniauth_config(auto_link_user: true)
+ end
+
+ context "and a current GitLab user with a matching email" do
+ let!(:existing_user) { create(:user, email: 'john@mail.com', username: 'john') }
+
+ it "adds the OmniAuth identity to the GitLab user account" do
+ oauth_user.save
+
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eql 'john'
+ expect(gl_user.email).to eql 'john@mail.com'
+ expect(gl_user.identities.length).to be 1
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(
+ [
+ { provider: 'twitter', extern_uid: uid }
+ ]
+ )
+ end
+ end
+
+ context "and no current GitLab user with a matching email" do
+ include_examples "to verify compliance with allow_single_sign_on"
+ end
+ end
+
context "with auto_link_ldap_user disabled (default)" do
before do
stub_omniauth_config(auto_link_ldap_user: false)
@@ -229,39 +267,56 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
context "and no account for the LDAP user" do
- before do
- allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
+ context 'when the LDAP user is found by UID' do
+ before do
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
- oauth_user.save
- end
+ oauth_user.save
+ end
- it "creates a user with dual LDAP and omniauth identities" do
- expect(gl_user).to be_valid
- expect(gl_user.username).to eql uid
- expect(gl_user.name).to eql 'John Doe'
- expect(gl_user.email).to eql 'johndoe@example.com'
- expect(gl_user.identities.length).to be 2
- identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
- expect(identities_as_hash).to match_array(
- [
- { provider: 'ldapmain', extern_uid: dn },
- { provider: 'twitter', extern_uid: uid }
- ]
- )
- end
+ it "creates a user with dual LDAP and omniauth identities" do
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eql uid
+ expect(gl_user.name).to eql 'John Doe'
+ expect(gl_user.email).to eql 'johndoe@example.com'
+ expect(gl_user.identities.length).to be 2
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(
+ [
+ { provider: 'ldapmain', extern_uid: dn },
+ { provider: 'twitter', extern_uid: uid }
+ ]
+ )
+ end
- it "has name and email set as synced" do
- expect(gl_user.user_synced_attributes_metadata.name_synced).to be_truthy
- expect(gl_user.user_synced_attributes_metadata.email_synced).to be_truthy
- end
+ it "has name and email set as synced" do
+ expect(gl_user.user_synced_attributes_metadata.name_synced).to be_truthy
+ expect(gl_user.user_synced_attributes_metadata.email_synced).to be_truthy
+ end
- it "has name and email set as read-only" do
- expect(gl_user.read_only_attribute?(:name)).to be_truthy
- expect(gl_user.read_only_attribute?(:email)).to be_truthy
+ it "has name and email set as read-only" do
+ expect(gl_user.read_only_attribute?(:name)).to be_truthy
+ expect(gl_user.read_only_attribute?(:email)).to be_truthy
+ end
+
+ it "has synced attributes provider set to ldapmain" do
+ expect(gl_user.user_synced_attributes_metadata.provider).to eql 'ldapmain'
+ end
end
- it "has synced attributes provider set to ldapmain" do
- expect(gl_user.user_synced_attributes_metadata.provider).to eql 'ldapmain'
+ context 'when the LDAP user is found by email address' do
+ before do
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_email).with(uid, any_args).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_email).with(info_hash[:email], any_args).and_return(ldap_user)
+
+ oauth_user.save
+ end
+
+ it 'creates the LDAP identity' do
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to include({ provider: 'ldapmain', extern_uid: dn })
+ end
end
end
@@ -363,6 +418,90 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
end
end
+
+ context "with both auto_link_user and auto_link_ldap_user enabled" do
+ before do
+ stub_omniauth_config(auto_link_user: true, auto_link_ldap_user: true)
+ end
+
+ context "and at least one LDAP provider is defined" do
+ before do
+ stub_ldap_config(providers: %w(ldapmain))
+ end
+
+ context "and a corresponding LDAP person" do
+ before do
+ allow(ldap_user).to receive_messages(
+ uid: uid,
+ username: uid,
+ name: 'John Doe',
+ email: ['john@mail.com'],
+ dn: dn
+ )
+ end
+
+ context "and no account for the LDAP user" do
+ before do
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
+
+ oauth_user.save
+ end
+
+ it "creates a user with dual LDAP and omniauth identities" do
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eql uid
+ expect(gl_user.name).to eql 'John Doe'
+ expect(gl_user.email).to eql 'john@mail.com'
+ expect(gl_user.identities.length).to be 2
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(
+ [
+ { provider: 'ldapmain', extern_uid: dn },
+ { provider: 'twitter', extern_uid: uid }
+ ]
+ )
+ end
+
+ it "has name and email set as synced" do
+ expect(gl_user.user_synced_attributes_metadata.name_synced).to be_truthy
+ expect(gl_user.user_synced_attributes_metadata.email_synced).to be_truthy
+ end
+
+ it "has name and email set as read-only" do
+ expect(gl_user.read_only_attribute?(:name)).to be_truthy
+ expect(gl_user.read_only_attribute?(:email)).to be_truthy
+ end
+
+ it "has synced attributes provider set to ldapmain" do
+ expect(gl_user.user_synced_attributes_metadata.provider).to eql 'ldapmain'
+ end
+ end
+
+ context "and LDAP user has an account already" do
+ let!(:existing_user) { create(:omniauth_user, name: 'John Doe', email: 'john@mail.com', extern_uid: dn, provider: 'ldapmain', username: 'john') }
+
+ it "adds the omniauth identity to the LDAP account" do
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(ldap_user)
+
+ oauth_user.save
+
+ expect(gl_user).to be_valid
+ expect(gl_user.username).to eql 'john'
+ expect(gl_user.name).to eql 'John Doe'
+ expect(gl_user.email).to eql 'john@mail.com'
+ expect(gl_user.identities.length).to be 2
+ identities_as_hash = gl_user.identities.map { |id| { provider: id.provider, extern_uid: id.extern_uid } }
+ expect(identities_as_hash).to match_array(
+ [
+ { provider: 'ldapmain', extern_uid: dn },
+ { provider: 'twitter', extern_uid: uid }
+ ]
+ )
+ end
+ end
+ end
+ end
+ end
end
describe 'blocking' do
@@ -790,7 +929,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
end
- describe '.find_by_uid_and_provider' do
+ describe '._uid_and_provider' do
let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
it 'normalizes extern_uid' do
diff --git a/spec/lib/gitlab/auth/request_authenticator_spec.rb b/spec/lib/gitlab/auth/request_authenticator_spec.rb
index 32d64519e2c..ef83321cc0e 100644
--- a/spec/lib/gitlab/auth/request_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/request_authenticator_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::Auth::RequestAuthenticator do
'REQUEST_METHOD' => 'GET'
}
end
+
let(:request) { ActionDispatch::Request.new(env) }
subject { described_class.new(request) }
diff --git a/spec/lib/gitlab/auth/saml/user_spec.rb b/spec/lib/gitlab/auth/saml/user_spec.rb
index 7f8346f0486..fd48492f18d 100644
--- a/spec/lib/gitlab/auth/saml/user_spec.rb
+++ b/spec/lib/gitlab/auth/saml/user_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Auth::Saml::User do
email: 'john@mail.com'
}
end
+
let(:ldap_user) { Gitlab::Auth::Ldap::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
describe '#save' do
@@ -194,6 +195,7 @@ RSpec.describe Gitlab::Auth::Saml::User do
}
}
end
+
let(:auth_hash) { OmniAuth::AuthHash.new(auth_hash_base_attributes) }
let(:uid_types) { %w(uid dn email) }
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index b62f9b55b64..dcaaa8d4188 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -149,7 +149,9 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
context 'build token' do
- subject { gl_auth.find_for_git_client('gitlab-ci-token', build.token, project: project, ip: 'ip') }
+ subject { gl_auth.find_for_git_client(username, build.token, project: project, ip: 'ip') }
+
+ let(:username) { 'gitlab-ci-token' }
context 'for running build' do
let!(:build) { create(:ci_build, :running) }
@@ -170,6 +172,14 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
expect(subject).to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
end
+
+ context 'username is not gitlab-ci-token' do
+ let(:username) { 'another_username' }
+
+ it 'fails to authenticate' do
+ expect(subject).to eq(Gitlab::Auth::Result.new(nil, nil, nil, nil))
+ end
+ end
end
(Ci::HasStatus::AVAILABLE_STATUSES - ['running']).each do |build_status|
@@ -628,6 +638,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
password: password,
password_confirmation: password)
end
+
let(:username) { 'John' } # username isn't lowercase, test this
let(:password) { 'my-secret' }
diff --git a/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb b/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
deleted file mode 100644
index 7991ad69007..00000000000
--- a/spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::ArchiveLegacyTraces do
- include TraceHelpers
-
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:builds) { table(:ci_builds) }
- let(:job_artifacts) { table(:ci_job_artifacts) }
-
- before do
- namespaces.create!(id: 123, name: 'gitlab1', path: 'gitlab1')
- projects.create!(id: 123, name: 'gitlab1', path: 'gitlab1', namespace_id: 123)
- @build = builds.create!(id: 1, project_id: 123, status: 'success', type: 'Ci::Build')
- end
-
- context 'when trace file exsits at the right place' do
- before do
- create_legacy_trace(@build, 'trace in file')
- end
-
- it 'correctly archive legacy traces' do
- expect(job_artifacts.count).to eq(0)
- expect(File.exist?(legacy_trace_path(@build))).to be_truthy
-
- described_class.new.perform(1, 1)
-
- expect(job_artifacts.count).to eq(1)
- expect(File.exist?(legacy_trace_path(@build))).to be_falsy
- expect(File.read(archived_trace_path(job_artifacts.first))).to eq('trace in file')
- end
- end
-
- context 'when trace file does not exsits at the right place' do
- it 'does not raise errors nor create job artifact' do
- expect { described_class.new.perform(1, 1) }.not_to raise_error
-
- expect(job_artifacts.count).to eq(0)
- end
- end
-
- context 'when trace data exsits in database' do
- before do
- create_legacy_trace_in_db(@build, 'trace in db')
- end
-
- it 'correctly archive legacy traces' do
- expect(job_artifacts.count).to eq(0)
- expect(@build.read_attribute(:trace)).not_to be_empty
-
- described_class.new.perform(1, 1)
-
- @build.reload
- expect(job_artifacts.count).to eq(1)
- expect(@build.read_attribute(:trace)).to be_nil
- expect(File.read(archived_trace_path(job_artifacts.first))).to eq('trace in db')
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb
deleted file mode 100644
index 79b344ea6fa..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_hashed_project_repositories_spec.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillHashedProjectRepositories do
- it_behaves_like 'backfill migration for project repositories', :hashed
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
deleted file mode 100644
index 1b2e1ed0c1a..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_project_fullpath_in_repo_config_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectFullpathInRepoConfig do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:group) { namespaces.create!(name: 'foo', path: 'foo') }
- let(:subgroup) { namespaces.create!(name: 'bar', path: 'bar', parent_id: group.id) }
-
- describe described_class::Storage::Hashed do
- let(:project) { double(id: 555) }
-
- subject(:project_storage) { described_class.new(project) }
-
- it 'has the correct disk_path' do
- expect(project_storage.disk_path).to eq('@hashed/91/a7/91a73fd806ab2c005c13b4dc19130a884e909dea3f72d46e30266fe1a1f588d8')
- end
- end
-
- describe described_class::Storage::LegacyProject do
- let(:project) { double(full_path: 'this/is/the/full/path') }
-
- subject(:project_storage) { described_class.new(project) }
-
- it 'has the correct disk_path' do
- expect(project_storage.disk_path).to eq('this/is/the/full/path')
- end
- end
-
- describe described_class::Project do
- let(:project_record) { projects.create!(namespace_id: subgroup.id, name: 'baz', path: 'baz') }
-
- subject(:project) { described_class.find(project_record.id) }
-
- describe '#full_path' do
- it 'returns path containing all parent namespaces' do
- expect(project.full_path).to eq('foo/bar/baz')
- end
-
- it 'raises OrphanedNamespaceError when any parent namespace does not exist' do
- subgroup.update_attribute(:parent_id, non_existing_record_id)
-
- expect { project.full_path }.to raise_error(Gitlab::BackgroundMigration::BackfillProjectFullpathInRepoConfig::OrphanedNamespaceError)
- end
- end
- end
-
- describe described_class::Up do
- describe '#perform' do
- subject(:migrate) { described_class.new.perform(projects.minimum(:id), projects.maximum(:id)) }
-
- it 'asks the gitaly client to set config' do
- projects.create!(namespace_id: subgroup.id, name: 'baz', path: 'baz')
- projects.create!(namespace_id: subgroup.id, name: 'buzz', path: 'buzz', storage_version: 1)
-
- expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
- allow(repository_service).to receive(:cleanup)
- expect(repository_service).to receive(:set_config).with('gitlab.fullpath' => 'foo/bar/baz')
- end
-
- expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
- allow(repository_service).to receive(:cleanup)
- expect(repository_service).to receive(:set_config).with('gitlab.fullpath' => 'foo/bar/buzz')
- end
-
- migrate
- end
- end
- end
-
- describe described_class::Down do
- describe '#perform' do
- subject(:migrate) { described_class.new.perform(projects.minimum(:id), projects.maximum(:id)) }
-
- it 'asks the gitaly client to set config' do
- projects.create!(namespace_id: subgroup.id, name: 'baz', path: 'baz')
-
- expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service|
- allow(repository_service).to receive(:cleanup)
- expect(repository_service).to receive(:delete_config).with(['gitlab.fullpath'])
- end
-
- migrate
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index ec2fd3cc4e0..fad33265030 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -264,6 +264,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
user_type: user_type,
confirmed_at: 1.day.ago)
end
+
let!(:invalid_snippet) { snippets.create(id: 4, type: 'PersonalSnippet', author_id: user.id, file_name: '.', content: content) }
let!(:snippet) { snippets.create(id: 5, type: 'PersonalSnippet', author_id: other_user.id, file_name: file_name, content: content) }
let(:ids) { [4, 5] }
diff --git a/spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb b/spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb
new file mode 100644
index 00000000000..71bb794d539
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::CopyMergeRequestTargetProjectToMergeRequestMetrics, :migration, schema: 20200723125205 do
+ let(:migration) { described_class.new }
+
+ let_it_be(:namespaces) { table(:namespaces) }
+ let_it_be(:projects) { table(:projects) }
+ let_it_be(:merge_requests) { table(:merge_requests) }
+ let_it_be(:metrics) { table(:merge_request_metrics) }
+
+ let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
+ let!(:project_1) { projects.create!(namespace_id: namespace.id) }
+ let!(:project_2) { projects.create!(namespace_id: namespace.id) }
+ let!(:merge_request_to_migrate_1) { merge_requests.create!(source_branch: 'a', target_branch: 'b', target_project_id: project_1.id) }
+ let!(:merge_request_to_migrate_2) { merge_requests.create!(source_branch: 'c', target_branch: 'd', target_project_id: project_2.id) }
+ let!(:merge_request_without_metrics) { merge_requests.create!(source_branch: 'e', target_branch: 'f', target_project_id: project_2.id) }
+
+ let!(:metrics_1) { metrics.create!(merge_request_id: merge_request_to_migrate_1.id) }
+ let!(:metrics_2) { metrics.create!(merge_request_id: merge_request_to_migrate_2.id) }
+
+ let(:merge_request_ids) { [merge_request_to_migrate_1.id, merge_request_to_migrate_2.id, merge_request_without_metrics.id] }
+
+ subject { migration.perform(merge_request_ids.min, merge_request_ids.max) }
+
+ it 'copies `target_project_id` to the associated `merge_request_metrics` record' do
+ subject
+
+ expect(metrics_1.reload.target_project_id).to eq(project_1.id)
+ expect(metrics_2.reload.target_project_id).to eq(project_2.id)
+ end
+
+  it 'does not create a metrics record when one is missing' do
+ subject
+
+ expect(metrics.find_by_merge_request_id(merge_request_without_metrics.id)).to be_nil
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb b/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb
deleted file mode 100644
index 8e3ace083fc..00000000000
--- a/spec/lib/gitlab/background_migration/fix_cross_project_label_links_spec.rb
+++ /dev/null
@@ -1,111 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::FixCrossProjectLabelLinks do
- let(:namespaces_table) { table(:namespaces) }
- let(:projects_table) { table(:projects) }
- let(:issues_table) { table(:issues) }
- let(:merge_requests_table) { table(:merge_requests) }
- let(:labels_table) { table(:labels) }
- let(:label_links_table) { table(:label_links) }
-
- let!(:group1) { namespaces_table.create(id: 10, type: 'Group', name: 'group1', path: 'group1') }
- let!(:group2) { namespaces_table.create(id: 20, type: 'Group', name: 'group2', path: 'group2') }
-
- let!(:project1) { projects_table.create(id: 1, name: 'project1', path: 'group1/project1', namespace_id: 10) }
- let!(:project2) { projects_table.create(id: 3, name: 'project2', path: 'group1/project2', namespace_id: 20) }
-
- let!(:label1) { labels_table.create(id: 1, title: 'bug', color: 'red', group_id: 10, type: 'GroupLabel') }
- let!(:label2) { labels_table.create(id: 2, title: 'bug', color: 'red', group_id: 20, type: 'GroupLabel') }
-
- def create_merge_request(id, project_id)
- merge_requests_table.create(id: id,
- target_project_id: project_id,
- target_branch: 'master',
- source_project_id: project_id,
- source_branch: 'mr name',
- title: "mr name#{id}")
- end
-
- def create_issue(id, project_id)
- issues_table.create(id: id, title: "issue#{id}", project_id: project_id)
- end
-
- def create_resource(target_type, id, project_id)
- target_type == 'Issue' ? create_issue(id, project_id) : create_merge_request(id, project_id)
- end
-
- shared_examples_for 'resource with cross-project labels' do
- it 'updates only cross-project label links which exist in the local project or group' do
- create_resource(target_type, 1, 1)
- create_resource(target_type, 2, 3)
- labels_table.create(id: 3, title: 'bug', color: 'red', project_id: 3, type: 'ProjectLabel')
- link = label_links_table.create(label_id: 2, target_type: target_type, target_id: 1)
- link2 = label_links_table.create(label_id: 3, target_type: target_type, target_id: 2)
-
- subject.perform(1, 100)
-
- expect(link.reload.label_id).to eq(1)
- expect(link2.reload.label_id).to eq(3)
- end
-
- it 'ignores cross-project label links if label color is different' do
- labels_table.create(id: 3, title: 'bug', color: 'green', group_id: 20, type: 'GroupLabel')
- create_resource(target_type, 1, 1)
- link = label_links_table.create(label_id: 3, target_type: target_type, target_id: 1)
-
- subject.perform(1, 100)
-
- expect(link.reload.label_id).to eq(3)
- end
-
- it 'ignores cross-project label links if label name is different' do
- labels_table.create(id: 3, title: 'bug1', color: 'red', group_id: 20, type: 'GroupLabel')
- create_resource(target_type, 1, 1)
- link = label_links_table.create(label_id: 3, target_type: target_type, target_id: 1)
-
- subject.perform(1, 100)
-
- expect(link.reload.label_id).to eq(3)
- end
-
- context 'with nested group' do
- before do
- namespaces_table.create(id: 11, type: 'Group', name: 'subgroup1', path: 'group1/subgroup1', parent_id: 10)
- projects_table.create(id: 2, name: 'subproject1', path: 'group1/subgroup1/subproject1', namespace_id: 11)
- create_resource(target_type, 1, 2)
- end
-
- it 'ignores label links referencing ancestor group labels' do
- labels_table.create(id: 4, title: 'bug', color: 'red', project_id: 2, type: 'ProjectLabel')
- label_links_table.create(label_id: 4, target_type: target_type, target_id: 1)
- link = label_links_table.create(label_id: 1, target_type: target_type, target_id: 1)
-
- subject.perform(1, 100)
-
- expect(link.reload.label_id).to eq(1)
- end
-
- it 'checks also issues and MRs in subgroups' do
- link = label_links_table.create(label_id: 2, target_type: target_type, target_id: 1)
-
- subject.perform(1, 100)
-
- expect(link.reload.label_id).to eq(1)
- end
- end
- end
-
- context 'resource is Issue' do
- it_behaves_like 'resource with cross-project labels' do
- let(:target_type) { 'Issue' }
- end
- end
-
- context 'resource is Merge Request' do
- it_behaves_like 'resource with cross-project labels' do
- let(:target_type) { 'MergeRequest' }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
index bf793e7c537..1637589d272 100644
--- a/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
+++ b/spec/lib/gitlab/background_migration/legacy_upload_mover_spec.rb
@@ -264,6 +264,7 @@ RSpec.describe Gitlab::BackgroundMigration::LegacyUploadMover do
let(:remote_file) do
{ key: "#{legacy_upload.path}" }
end
+
let(:connection) { ::Fog::Storage.new(FileUploader.object_store_credentials) }
let(:bucket) { connection.directories.create(key: 'uploads') }
diff --git a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb b/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
deleted file mode 100644
index 65d45ec694f..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_build_stage_spec.rb
+++ /dev/null
@@ -1,84 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateBuildStage do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:stages) { table(:ci_stages) }
- let(:jobs) { table(:ci_builds) }
-
- let(:statuses) do
- {
- created: 0,
- pending: 1,
- running: 2,
- success: 3,
- failed: 4,
- canceled: 5,
- skipped: 6,
- manual: 7
- }
- end
-
- before do
- namespace = namespaces.create!(name: 'gitlab-org', path: 'gitlab-org')
- projects.create!(id: 123, name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id)
- pipelines.create!(id: 1, project_id: 123, ref: 'master', sha: 'adf43c3a')
-
- jobs.create!(id: 1, commit_id: 1, project_id: 123,
- stage_idx: 2, stage: 'build', status: :success)
- jobs.create!(id: 2, commit_id: 1, project_id: 123,
- stage_idx: 2, stage: 'build', status: :success)
- jobs.create!(id: 3, commit_id: 1, project_id: 123,
- stage_idx: 1, stage: 'test', status: :failed)
- jobs.create!(id: 4, commit_id: 1, project_id: 123,
- stage_idx: 1, stage: 'test', status: :success)
- jobs.create!(id: 5, commit_id: 1, project_id: 123,
- stage_idx: 3, stage: 'deploy', status: :pending)
- jobs.create!(id: 6, commit_id: 1, project_id: 123,
- stage_idx: 3, stage: nil, status: :pending)
- end
-
- it 'correctly migrates builds stages' do
- expect(stages.count).to be_zero
-
- described_class.new.perform(1, 6)
-
- expect(stages.count).to eq 3
- expect(stages.all.pluck(:name)).to match_array %w[test build deploy]
- expect(jobs.where(stage_id: nil)).to be_one
- expect(jobs.find_by(stage_id: nil).id).to eq 6
- expect(stages.all.pluck(:status)).to match_array [statuses[:success],
- statuses[:failed],
- statuses[:pending]]
- end
-
- it 'recovers from unique constraint violation only twice', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/28128' do
- allow(described_class::Migratable::Stage)
- .to receive(:find_by).and_return(nil)
-
- expect(described_class::Migratable::Stage)
- .to receive(:find_by).exactly(3).times
-
- expect { described_class.new.perform(1, 6) }
- .to raise_error ActiveRecord::RecordNotUnique
- end
-
- context 'when invalid class can be loaded due to single table inheritance' do
- let(:commit_status) do
- jobs.create!(id: 7, commit_id: 1, project_id: 123, stage_idx: 4,
- stage: 'post-deploy', status: :failed)
- end
-
- before do
- commit_status.update_column(:type, 'SomeClass')
- end
-
- it 'does ignore single table inheritance type' do
- expect { described_class.new.perform(1, 7) }.not_to raise_error
- expect(jobs.find(7)).to have_attributes(stage_id: (a_value > 0))
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
index d53f79c61c1..d829fd5daf5 100644
--- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb
@@ -286,9 +286,11 @@ RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, s
let!(:jira_service_invalid) do
services.create(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker')
end
+
let!(:jira_service_valid) do
services.create(id: 20, type: 'JiraService', properties: jira_properties.to_json, category: 'issue_tracker')
end
+
let!(:bugzilla_service_valid) do
services.create(id: 11, type: 'BugzillaService', title: nil, properties: tracker_properties.to_json, category: 'issue_tracker')
end
diff --git a/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb b/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb
deleted file mode 100644
index 81874ff7982..00000000000
--- a/spec/lib/gitlab/background_migration/migrate_stage_index_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::MigrateStageIndex do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:stages) { table(:ci_stages) }
- let(:jobs) { table(:ci_builds) }
- let(:namespace) { namespaces.create(name: 'gitlab-org', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'gitlab', path: 'gitlab') }
- let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a') }
- let(:stage1) { stages.create(project_id: project.id, pipeline_id: pipeline.id, name: 'build') }
- let(:stage2) { stages.create(project_id: project.id, pipeline_id: pipeline.id, name: 'test') }
-
- before do
- jobs.create!(commit_id: pipeline.id, project_id: project.id,
- stage_idx: 2, stage_id: stage1.id)
- jobs.create!(commit_id: pipeline.id, project_id: project.id,
- stage_idx: 2, stage_id: stage1.id)
- jobs.create!(commit_id: pipeline.id, project_id: project.id,
- stage_idx: 10, stage_id: stage1.id)
- jobs.create!(commit_id: pipeline.id, project_id: project.id,
- stage_idx: 3, stage_id: stage2.id)
- end
-
- it 'correctly migrates stages indices' do
- expect(stages.all.pluck(:position)).to all(be_nil)
-
- described_class.new.perform(stage1.id, stage2.id)
-
- expect(stages.all.order(:id).pluck(:position)).to eq [2, 3]
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb b/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb
deleted file mode 100644
index 73faca54b52..00000000000
--- a/spec/lib/gitlab/background_migration/populate_cluster_kubernetes_namespace_table_spec.rb
+++ /dev/null
@@ -1,94 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateClusterKubernetesNamespaceTable do
- include MigrationHelpers::ClusterHelpers
-
- let(:migration) { described_class.new }
- let(:clusters_table) { table(:clusters) }
- let(:cluster_projects_table) { table(:cluster_projects) }
- let(:cluster_kubernetes_namespaces_table) { table(:clusters_kubernetes_namespaces) }
- let(:projects_table) { table(:projects) }
- let(:namespaces_table) { table(:namespaces) }
- let(:provider_gcp_table) { table(:cluster_providers_gcp) }
- let(:platform_kubernetes_table) { table(:cluster_platforms_kubernetes) }
-
- before do
- create_cluster_project_list(10)
- end
-
- shared_examples 'consistent kubernetes namespace attributes' do
- it 'populates namespace and service account information' do
- migration.perform
-
- clusters_with_namespace.each do |cluster|
- cluster_project = cluster_projects_table.find_by(cluster_id: cluster.id)
- project = projects_table.find(cluster_project.project_id)
- kubernetes_namespace = cluster_kubernetes_namespaces_table.find_by(cluster_id: cluster.id)
- namespace = "#{project.path}-#{project.id}"
-
- expect(kubernetes_namespace).to be_present
- expect(kubernetes_namespace.cluster_project_id).to eq(cluster_project.id)
- expect(kubernetes_namespace.project_id).to eq(cluster_project.project_id)
- expect(kubernetes_namespace.cluster_id).to eq(cluster_project.cluster_id)
- expect(kubernetes_namespace.namespace).to eq(namespace)
- expect(kubernetes_namespace.service_account_name).to eq("#{namespace}-service-account")
- end
- end
- end
-
- context 'when no Clusters::Project has a Clusters::KubernetesNamespace' do
- let(:cluster_projects) { cluster_projects_table.all }
-
- it 'creates a Clusters::KubernetesNamespace per Clusters::Project' do
- expect do
- migration.perform
- end.to change(Clusters::KubernetesNamespace, :count).by(cluster_projects_table.count)
- end
-
- it_behaves_like 'consistent kubernetes namespace attributes' do
- let(:clusters_with_namespace) { clusters_table.all }
- end
- end
-
- context 'when every Clusters::Project has Clusters::KubernetesNamespace' do
- before do
- create_kubernetes_namespace(clusters_table.all)
- end
-
- it 'does not create any Clusters::KubernetesNamespace' do
- expect do
- migration.perform
- end.not_to change(Clusters::KubernetesNamespace, :count)
- end
- end
-
- context 'when only some Clusters::Project have Clusters::KubernetesNamespace related' do
- let(:with_kubernetes_namespace) { clusters_table.first(6) }
- let(:with_no_kubernetes_namespace) { clusters_table.last(4) }
-
- before do
- create_kubernetes_namespace(with_kubernetes_namespace)
- end
-
- it 'creates limited number of Clusters::KubernetesNamespace' do
- expect do
- migration.perform
- end.to change(Clusters::KubernetesNamespace, :count).by(with_no_kubernetes_namespace.count)
- end
-
- it 'does not modify clusters with Clusters::KubernetesNamespace' do
- migration.perform
-
- with_kubernetes_namespace.each do |cluster|
- kubernetes_namespace = cluster_kubernetes_namespaces_table.where(cluster_id: cluster.id)
- expect(kubernetes_namespace.count).to eq(1)
- end
- end
-
- it_behaves_like 'consistent kubernetes namespace attributes' do
- let(:clusters_with_namespace) { with_no_kubernetes_namespace }
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb
new file mode 100644
index 00000000000..e746451b1b9
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::PopulatePersonalSnippetStatistics do
+ let(:file_name) { 'file_name.rb' }
+ let(:content) { 'content' }
+ let(:snippets) { table(:snippets) }
+ let(:snippet_repositories) { table(:snippet_repositories) }
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:snippet_statistics) { table(:snippet_statistics) }
+ let(:namespace_statistics) { table(:namespace_root_storage_statistics) }
+ let(:routes) { table(:routes) }
+ let(:repo_size) { 123456 }
+ let(:expected_repo_size) { repo_size.megabytes }
+
+ let(:user1) { users.create!(id: 1, email: 'test@example.com', projects_limit: 100, username: 'test1') }
+ let(:user2) { users.create!(id: 2, email: 'test2@example.com', projects_limit: 100, username: 'test2') }
+ let!(:user1_namespace) { namespaces.create!(id: 1, name: 'user1', path: 'user1', owner_id: user1.id) }
+ let!(:user2_namespace) { namespaces.create!(id: 2, name: 'user2', path: 'user2', owner_id: user2.id) }
+ let(:user1_namespace_statistics) { namespace_statistics.find_by(namespace_id: user1_namespace.id) }
+ let(:user2_namespace_statistics) { namespace_statistics.find_by(namespace_id: user2_namespace.id) }
+
+ let(:ids) { snippets.pluck(:id) }
+ let(:migration) { described_class.new }
+
+ subject do
+ migration.perform(ids)
+ end
+
+ before do
+ allow_any_instance_of(Repository).to receive(:size).and_return(repo_size)
+ end
+
+ after do
+ snippets.all.each { |s| raw_repository(s).remove }
+ end
+
+ context 'with existing personal snippets' do
+ let!(:snippet1) { create_snippet(1, user1) }
+ let!(:snippet2) { create_snippet(2, user1) }
+ let!(:snippet3) { create_snippet(3, user2) }
+ let!(:snippet4) { create_snippet(4, user2) }
+
+ before do
+ create_snippet_statistics(2, 0)
+ create_snippet_statistics(4, 123)
+ end
+
+ it 'creates/updates all snippet_statistics' do
+ expect { subject }.to change { snippet_statistics.count }.from(2).to(4)
+
+ expect(snippet_statistics.pluck(:repository_size)).to be_all(expected_repo_size)
+ end
+
+ it 'creates/updates the associated namespace statistics' do
+ expect(migration).to receive(:update_namespace_statistics).twice.and_call_original
+
+ subject
+
+ stats = snippet_statistics.where(snippet_id: [snippet1, snippet2]).sum(:repository_size)
+ expect(user1_namespace_statistics.snippets_size).to eq stats
+
+ stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
+ expect(user2_namespace_statistics.snippets_size).to eq stats
+ end
+
+    context 'when an error is raised while updating namespace statistics' do
+      it 'logs the error and continues execution' do
+ expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
+ expect(instance).to receive(:execute).with(Namespace.find(user1_namespace.id)).and_raise('Error')
+ end
+
+ expect_next_instance_of(Namespaces::StatisticsRefresherService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:error).with(message: /Error updating statistics for namespace/).once
+ end
+
+ subject
+
+ expect(user1_namespace_statistics).to be_nil
+
+ stats = snippet_statistics.where(snippet_id: [snippet3, snippet4]).sum(:repository_size)
+ expect(user2_namespace_statistics.snippets_size).to eq stats
+ end
+ end
+ end
+
+ context 'when a snippet repository is empty' do
+ let!(:snippet1) { create_snippet(1, user1, with_repo: false) }
+ let!(:snippet2) { create_snippet(2, user1) }
+
+    it 'logs an error and continues execution' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:error).with(message: /Invalid snippet repository/).once
+ end
+
+ subject
+
+ expect(snippet_statistics.find_by(snippet_id: snippet1.id)).to be_nil
+ expect(user1_namespace_statistics.snippets_size).to eq expected_repo_size
+ end
+ end
+
+ def create_snippet(id, author, with_repo: true)
+ snippets.create!(id: id, type: 'PersonalSnippet', author_id: author.id, file_name: file_name, content: content).tap do |snippet|
+ if with_repo
+ allow(snippet).to receive(:disk_path).and_return(disk_path(snippet))
+
+ TestEnv.copy_repo(snippet,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
+
+ raw_repository(snippet).create_repository
+ end
+ end
+ end
+
+ def create_snippet_statistics(snippet_id, repository_size = 0)
+ snippet_statistics.create!(snippet_id: snippet_id, repository_size: repository_size)
+ end
+
+ def raw_repository(snippet)
+ Gitlab::Git::Repository.new('default',
+ "#{disk_path(snippet)}.git",
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
+ "@snippets/#{snippet.id}")
+ end
+
+ def hashed_repository(snippet)
+ Storage::Hashed.new(snippet, prefix: '@snippets')
+ end
+
+ def disk_path(snippet)
+ hashed_repository(snippet).disk_path
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb
deleted file mode 100644
index 6a25e8e2784..00000000000
--- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_dependencies/untracked_file_spec.rb
+++ /dev/null
@@ -1,263 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateUntrackedUploadsDependencies::UntrackedFile do
- include MigrationsHelpers::TrackUntrackedUploadsHelpers
-
- let!(:appearances) { table(:appearances) }
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:routes) { table(:routes) }
- let!(:uploads) { table(:uploads) }
-
- before(:all) do
- ensure_temporary_tracking_table_exists
- end
-
- describe '#upload_path' do
- def assert_upload_path(file_path, expected_upload_path)
- untracked_file = create_untracked_file(file_path)
-
- expect(untracked_file.upload_path).to eq(expected_upload_path)
- end
-
- context 'for an appearance logo file path' do
- it 'returns the file path relative to the CarrierWave root' do
- assert_upload_path('/-/system/appearance/logo/1/some_logo.jpg', 'uploads/-/system/appearance/logo/1/some_logo.jpg')
- end
- end
-
- context 'for an appearance header_logo file path' do
- it 'returns the file path relative to the CarrierWave root' do
- assert_upload_path('/-/system/appearance/header_logo/1/some_logo.jpg', 'uploads/-/system/appearance/header_logo/1/some_logo.jpg')
- end
- end
-
- context 'for a pre-Markdown Note attachment file path' do
- it 'returns the file path relative to the CarrierWave root' do
- assert_upload_path('/-/system/note/attachment/1234/some_attachment.pdf', 'uploads/-/system/note/attachment/1234/some_attachment.pdf')
- end
- end
-
- context 'for a user avatar file path' do
- it 'returns the file path relative to the CarrierWave root' do
- assert_upload_path('/-/system/user/avatar/1234/avatar.jpg', 'uploads/-/system/user/avatar/1234/avatar.jpg')
- end
- end
-
- context 'for a group avatar file path' do
- it 'returns the file path relative to the CarrierWave root' do
- assert_upload_path('/-/system/group/avatar/1234/avatar.jpg', 'uploads/-/system/group/avatar/1234/avatar.jpg')
- end
- end
-
- context 'for a project avatar file path' do
- it 'returns the file path relative to the CarrierWave root' do
- assert_upload_path('/-/system/project/avatar/1234/avatar.jpg', 'uploads/-/system/project/avatar/1234/avatar.jpg')
- end
- end
-
- context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- it 'returns the file path relative to the project directory in uploads' do
- project = create_project
- random_hex = SecureRandom.hex
-
- assert_upload_path("/#{get_full_path(project)}/#{random_hex}/Some file.jpg", "#{random_hex}/Some file.jpg")
- end
- end
- end
-
- describe '#uploader' do
- def assert_uploader(file_path, expected_uploader)
- untracked_file = create_untracked_file(file_path)
-
- expect(untracked_file.uploader).to eq(expected_uploader)
- end
-
- context 'for an appearance logo file path' do
- it 'returns AttachmentUploader as a string' do
- assert_uploader('/-/system/appearance/logo/1/some_logo.jpg', 'AttachmentUploader')
- end
- end
-
- context 'for an appearance header_logo file path' do
- it 'returns AttachmentUploader as a string' do
- assert_uploader('/-/system/appearance/header_logo/1/some_logo.jpg', 'AttachmentUploader')
- end
- end
-
- context 'for a pre-Markdown Note attachment file path' do
- it 'returns AttachmentUploader as a string' do
- assert_uploader('/-/system/note/attachment/1234/some_attachment.pdf', 'AttachmentUploader')
- end
- end
-
- context 'for a user avatar file path' do
- it 'returns AvatarUploader as a string' do
- assert_uploader('/-/system/user/avatar/1234/avatar.jpg', 'AvatarUploader')
- end
- end
-
- context 'for a group avatar file path' do
- it 'returns AvatarUploader as a string' do
- assert_uploader('/-/system/group/avatar/1234/avatar.jpg', 'AvatarUploader')
- end
- end
-
- context 'for a project avatar file path' do
- it 'returns AvatarUploader as a string' do
- assert_uploader('/-/system/project/avatar/1234/avatar.jpg', 'AvatarUploader')
- end
- end
-
- context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- it 'returns FileUploader as a string' do
- project = create_project
-
- assert_uploader("/#{get_full_path(project)}/#{SecureRandom.hex}/Some file.jpg", 'FileUploader')
- end
- end
- end
-
- describe '#model_type' do
- def assert_model_type(file_path, expected_model_type)
- untracked_file = create_untracked_file(file_path)
-
- expect(untracked_file.model_type).to eq(expected_model_type)
- end
-
- context 'for an appearance logo file path' do
- it 'returns Appearance as a string' do
- assert_model_type('/-/system/appearance/logo/1/some_logo.jpg', 'Appearance')
- end
- end
-
- context 'for an appearance header_logo file path' do
- it 'returns Appearance as a string' do
- assert_model_type('/-/system/appearance/header_logo/1/some_logo.jpg', 'Appearance')
- end
- end
-
- context 'for a pre-Markdown Note attachment file path' do
- it 'returns Note as a string' do
- assert_model_type('/-/system/note/attachment/1234/some_attachment.pdf', 'Note')
- end
- end
-
- context 'for a user avatar file path' do
- it 'returns User as a string' do
- assert_model_type('/-/system/user/avatar/1234/avatar.jpg', 'User')
- end
- end
-
- context 'for a group avatar file path' do
- it 'returns Namespace as a string' do
- assert_model_type('/-/system/group/avatar/1234/avatar.jpg', 'Namespace')
- end
- end
-
- context 'for a project avatar file path' do
- it 'returns Project as a string' do
- assert_model_type('/-/system/project/avatar/1234/avatar.jpg', 'Project')
- end
- end
-
- context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- it 'returns Project as a string' do
- project = create_project
-
- assert_model_type("/#{get_full_path(project)}/#{SecureRandom.hex}/Some file.jpg", 'Project')
- end
- end
- end
-
- describe '#model_id' do
- def assert_model_id(file_path, expected_model_id)
- untracked_file = create_untracked_file(file_path)
-
- expect(untracked_file.model_id).to eq(expected_model_id)
- end
-
- context 'for an appearance logo file path' do
- it 'returns the ID as a string' do
- assert_model_id('/-/system/appearance/logo/1/some_logo.jpg', 1)
- end
- end
-
- context 'for an appearance header_logo file path' do
- it 'returns the ID as a string' do
- assert_model_id('/-/system/appearance/header_logo/1/some_logo.jpg', 1)
- end
- end
-
- context 'for a pre-Markdown Note attachment file path' do
- it 'returns the ID as a string' do
- assert_model_id('/-/system/note/attachment/1234/some_attachment.pdf', 1234)
- end
- end
-
- context 'for a user avatar file path' do
- it 'returns the ID as a string' do
- assert_model_id('/-/system/user/avatar/1234/avatar.jpg', 1234)
- end
- end
-
- context 'for a group avatar file path' do
- it 'returns the ID as a string' do
- assert_model_id('/-/system/group/avatar/1234/avatar.jpg', 1234)
- end
- end
-
- context 'for a project avatar file path' do
- it 'returns the ID as a string' do
- assert_model_id('/-/system/project/avatar/1234/avatar.jpg', 1234)
- end
- end
-
- context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- it 'returns the ID as a string' do
- project = create_project
-
- assert_model_id("/#{get_full_path(project)}/#{SecureRandom.hex}/Some file.jpg", project.id)
- end
- end
- end
-
- describe '#file_size' do
- context 'for an appearance logo file path' do
- let(:appearance) { create_or_update_appearance(logo: true) }
- let(:untracked_file) { described_class.create!(path: get_uploads(appearance, 'Appearance').first.path) }
-
- it 'returns the file size' do
- expect(untracked_file.file_size).to eq(1062)
- end
- end
-
- context 'for a project avatar file path' do
- let(:project) { create_project(avatar: true) }
- let(:untracked_file) { described_class.create!(path: get_uploads(project, 'Project').first.path) }
-
- it 'returns the file size' do
- expect(untracked_file.file_size).to eq(1062)
- end
- end
-
- context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- let(:project) { create_project }
- let(:untracked_file) { create_untracked_file("/#{get_full_path(project)}/#{get_uploads(project, 'Project').first.path}") }
-
- before do
- add_markdown_attachment(project)
- end
-
- it 'returns the file size' do
- expect(untracked_file.file_size).to eq(1062)
- end
- end
- end
-
- def create_untracked_file(path_relative_to_upload_dir)
- described_class.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}#{path_relative_to_upload_dir}")
- end
-end
diff --git a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
deleted file mode 100644
index 787cc54e79a..00000000000
--- a/spec/lib/gitlab/background_migration/populate_untracked_uploads_spec.rb
+++ /dev/null
@@ -1,254 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::PopulateUntrackedUploads do
- include MigrationsHelpers::TrackUntrackedUploadsHelpers
-
- subject { described_class.new }
-
- let!(:appearances) { table(:appearances) }
- let!(:namespaces) { table(:namespaces) }
- let!(:notes) { table(:notes) }
- let!(:projects) { table(:projects) }
- let!(:routes) { table(:routes) }
- let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) }
- let!(:uploads) { table(:uploads) }
- let!(:users) { table(:users) }
-
- before do
- ensure_temporary_tracking_table_exists
- uploads.delete_all
- end
-
- context 'with untracked files and tracked files in untracked_files_for_uploads' do
- let!(:appearance) { create_or_update_appearance(logo: true, header_logo: true) }
- let!(:user1) { create_user(avatar: true) }
- let!(:user2) { create_user(avatar: true) }
- let!(:project1) { create_project(avatar: true) }
- let!(:project2) { create_project(avatar: true) }
-
- before do
- add_markdown_attachment(project1)
- add_markdown_attachment(project2)
-
- # File records created by PrepareUntrackedUploads
- untracked_files_for_uploads.create!(path: get_uploads(appearance, 'Appearance').first.path)
- untracked_files_for_uploads.create!(path: get_uploads(appearance, 'Appearance').last.path)
- untracked_files_for_uploads.create!(path: get_uploads(user1, 'User').first.path)
- untracked_files_for_uploads.create!(path: get_uploads(user2, 'User').first.path)
- untracked_files_for_uploads.create!(path: get_uploads(project1, 'Project').first.path)
- untracked_files_for_uploads.create!(path: get_uploads(project2, 'Project').first.path)
- untracked_files_for_uploads.create!(path: "#{legacy_project_uploads_dir(project1).sub("#{MigrationsHelpers::TrackUntrackedUploadsHelpers::PUBLIC_DIR}/", '')}/#{get_uploads(project1, 'Project').last.path}")
- untracked_files_for_uploads.create!(path: "#{legacy_project_uploads_dir(project2).sub("#{MigrationsHelpers::TrackUntrackedUploadsHelpers::PUBLIC_DIR}/", '')}/#{get_uploads(project2, 'Project').last.path}")
-
- # Untrack 4 files
- get_uploads(user2, 'User').delete_all
- get_uploads(project2, 'Project').delete_all # 2 files: avatar and a Markdown upload
- get_uploads(appearance, 'Appearance').where("path like '%header_logo%'").delete_all
- end
-
- it 'adds untracked files to the uploads table' do
- expect do
- subject.perform(1, untracked_files_for_uploads.reorder(:id).last.id)
- end.to change { uploads.count }.from(4).to(8)
-
- expect(get_uploads(user2, 'User').count).to eq(1)
- expect(get_uploads(project2, 'Project').count).to eq(2)
- expect(get_uploads(appearance, 'Appearance').count).to eq(2)
- end
-
- it 'deletes rows after processing them' do
- expect(subject).to receive(:drop_temp_table_if_finished) # Don't drop the table so we can look at it
-
- expect do
- subject.perform(1, untracked_files_for_uploads.last.id)
- end.to change { untracked_files_for_uploads.count }.from(8).to(0)
- end
-
- it 'does not create duplicate uploads of already tracked files' do
- subject.perform(1, untracked_files_for_uploads.last.id)
-
- expect(get_uploads(user1, 'User').count).to eq(1)
- expect(get_uploads(project1, 'Project').count).to eq(2)
- expect(get_uploads(appearance, 'Appearance').count).to eq(2)
- end
-
- it 'uses the start and end batch ids [only 1st half]' do
- ids = untracked_files_for_uploads.all.order(:id).pluck(:id)
- start_id = ids[0]
- end_id = ids[3]
-
- expect do
- subject.perform(start_id, end_id)
- end.to change { uploads.count }.from(4).to(6)
-
- expect(get_uploads(user1, 'User').count).to eq(1)
- expect(get_uploads(user2, 'User').count).to eq(1)
- expect(get_uploads(appearance, 'Appearance').count).to eq(2)
- expect(get_uploads(project1, 'Project').count).to eq(2)
- expect(get_uploads(project2, 'Project').count).to eq(0)
-
- # Only 4 have been either confirmed or added to uploads
- expect(untracked_files_for_uploads.count).to eq(4)
- end
-
- it 'uses the start and end batch ids [only 2nd half]' do
- ids = untracked_files_for_uploads.all.order(:id).pluck(:id)
- start_id = ids[4]
- end_id = ids[7]
-
- expect do
- subject.perform(start_id, end_id)
- end.to change { uploads.count }.from(4).to(6)
-
- expect(get_uploads(user1, 'User').count).to eq(1)
- expect(get_uploads(user2, 'User').count).to eq(0)
- expect(get_uploads(appearance, 'Appearance').count).to eq(1)
- expect(get_uploads(project1, 'Project').count).to eq(2)
- expect(get_uploads(project2, 'Project').count).to eq(2)
-
- # Only 4 have been either confirmed or added to uploads
- expect(untracked_files_for_uploads.count).to eq(4)
- end
-
- it 'does not drop the temporary tracking table after processing the batch, if there are still untracked rows' do
- subject.perform(1, untracked_files_for_uploads.last.id - 1)
-
- expect(ActiveRecord::Base.connection.table_exists?(:untracked_files_for_uploads)).to be_truthy
- end
-
- it 'drops the temporary tracking table after processing the batch, if there are no untracked rows left' do
- expect(subject).to receive(:drop_temp_table_if_finished)
-
- subject.perform(1, untracked_files_for_uploads.last.id)
- end
-
- it 'does not block a whole batch because of one bad path' do
- untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{get_full_path(project2)}/._7d37bf4c747916390e596744117d5d1a")
- expect(untracked_files_for_uploads.count).to eq(9)
- expect(uploads.count).to eq(4)
-
- subject.perform(1, untracked_files_for_uploads.last.id)
-
- expect(untracked_files_for_uploads.count).to eq(1)
- expect(uploads.count).to eq(8)
- end
-
- it 'an unparseable path is shown in error output' do
- bad_path = "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{get_full_path(project2)}/._7d37bf4c747916390e596744117d5d1a"
- untracked_files_for_uploads.create!(path: bad_path)
-
- expect(Rails.logger).to receive(:error).with(/Error parsing path "#{bad_path}":/)
-
- subject.perform(1, untracked_files_for_uploads.last.id)
- end
- end
-
- context 'with no untracked files' do
- it 'does not add to the uploads table (and does not raise error)' do
- expect do
- subject.perform(1, 1000)
- end.not_to change { uploads.count }.from(0)
- end
- end
-
- describe 'upload outcomes for each path pattern' do
- shared_examples_for 'non_markdown_file' do
- let!(:expected_upload_attrs) { model_uploads.first.attributes.slice('path', 'uploader', 'size', 'checksum') }
- let!(:untracked_file) { untracked_files_for_uploads.create!(path: expected_upload_attrs['path']) }
-
- before do
- model_uploads.delete_all
- end
-
- it 'creates an Upload record' do
- expect do
- subject.perform(1, untracked_files_for_uploads.last.id)
- end.to change { model_uploads.count }.from(0).to(1)
-
- expect(model_uploads.first.attributes).to include(expected_upload_attrs)
- end
- end
-
- context 'for an appearance logo file path' do
- let(:model) { create_or_update_appearance(logo: true) }
- let(:model_uploads) { get_uploads(model, 'Appearance') }
-
- it_behaves_like 'non_markdown_file'
- end
-
- context 'for an appearance header_logo file path' do
- let(:model) { create_or_update_appearance(header_logo: true) }
- let(:model_uploads) { get_uploads(model, 'Appearance') }
-
- it_behaves_like 'non_markdown_file'
- end
-
- context 'for a pre-Markdown Note attachment file path' do
- let(:model) { create_note(attachment: true) }
- let!(:expected_upload_attrs) { get_uploads(model, 'Note').first.attributes.slice('path', 'uploader', 'size', 'checksum') }
- let!(:untracked_file) { untracked_files_for_uploads.create!(path: expected_upload_attrs['path']) }
-
- before do
- get_uploads(model, 'Note').delete_all
- end
-
- # Can't use the shared example because Note doesn't have an `uploads` association
- it 'creates an Upload record' do
- expect do
- subject.perform(1, untracked_files_for_uploads.last.id)
- end.to change { get_uploads(model, 'Note').count }.from(0).to(1)
-
- expect(get_uploads(model, 'Note').first.attributes).to include(expected_upload_attrs)
- end
- end
-
- context 'for a user avatar file path' do
- let(:model) { create_user(avatar: true) }
- let(:model_uploads) { get_uploads(model, 'User') }
-
- it_behaves_like 'non_markdown_file'
- end
-
- context 'for a group avatar file path' do
- let(:model) { create_group(avatar: true) }
- let(:model_uploads) { get_uploads(model, 'Namespace') }
-
- it_behaves_like 'non_markdown_file'
- end
-
- context 'for a project avatar file path' do
- let(:model) { create_project(avatar: true) }
- let(:model_uploads) { get_uploads(model, 'Project') }
-
- it_behaves_like 'non_markdown_file'
- end
-
- context 'for a project Markdown attachment (notes, issues, MR descriptions) file path' do
- let(:model) { create_project }
-
- before do
- # Upload the file
- add_markdown_attachment(model)
-
- # Create the untracked_files_for_uploads record
- untracked_files_for_uploads.create!(path: "#{Gitlab::BackgroundMigration::PrepareUntrackedUploads::RELATIVE_UPLOAD_DIR}/#{get_full_path(model)}/#{get_uploads(model, 'Project').first.path}")
-
- # Save the expected upload attributes
- @expected_upload_attrs = get_uploads(model, 'Project').first.attributes.slice('path', 'uploader', 'size', 'checksum')
-
- # Untrack the file
- get_uploads(model, 'Project').delete_all
- end
-
- it 'creates an Upload record' do
- expect do
- subject.perform(1, untracked_files_for_uploads.last.id)
- end.to change { get_uploads(model, 'Project').count }.from(0).to(1)
-
- expect(get_uploads(model, 'Project').first.attributes).to include(@expected_upload_attrs)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb b/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
deleted file mode 100644
index 9b01407dc8b..00000000000
--- a/spec/lib/gitlab/background_migration/prepare_untracked_uploads_spec.rb
+++ /dev/null
@@ -1,159 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# Rollback DB to 10.5 (later than this was originally written for) because it still needs to work.
-RSpec.describe Gitlab::BackgroundMigration::PrepareUntrackedUploads do
- include MigrationsHelpers::TrackUntrackedUploadsHelpers
-
- let!(:untracked_files_for_uploads) { table(:untracked_files_for_uploads) }
- let!(:appearances) { table(:appearances) }
- let!(:namespaces) { table(:namespaces) }
- let!(:projects) { table(:projects) }
- let!(:routes) { table(:routes) }
- let!(:uploads) { table(:uploads) }
- let!(:users) { table(:users) }
-
- around do |example|
- # Especially important so the follow-up migration does not get run
- Sidekiq::Testing.fake! do
- example.run
- end
- end
-
- shared_examples 'prepares the untracked_files_for_uploads table' do
- context 'when files were uploaded before and after hashed storage was enabled' do
- let!(:appearance) { create_or_update_appearance(logo: true, header_logo: true) }
- let!(:user) { create_user(avatar: true) }
- let!(:project1) { create_project(avatar: true) }
- let(:project2) { create_project } # instantiate after enabling hashed_storage
-
- before do
- # Markdown upload before enabling hashed_storage
- add_markdown_attachment(project1)
-
- # Markdown upload after enabling hashed_storage
- add_markdown_attachment(project2, hashed_storage: true)
- end
-
- it 'has a path field long enough for really long paths' do
- described_class.new.perform
-
- component = 'a' * 255
-
- long_path = [
- 'uploads',
- component, # project.full_path
- component # filename
- ].flatten.join('/')
-
- record = untracked_files_for_uploads.create!(path: long_path)
- expect(record.reload.path.size).to eq(519)
- end
-
- it 'adds unhashed files to the untracked_files_for_uploads table' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
-
- it 'adds files with paths relative to CarrierWave.root' do
- described_class.new.perform
- untracked_files_for_uploads.all.each do |file|
- expect(file.path.start_with?('uploads/')).to be_truthy
- end
- end
-
- it 'does not add hashed files to the untracked_files_for_uploads table' do
- described_class.new.perform
-
- hashed_file_path = get_uploads(project2, 'Project').find_by(uploader: 'FileUploader').path
- expect(untracked_files_for_uploads.where("path like '%#{hashed_file_path}%'").exists?).to be_falsey
- end
-
- it 'correctly schedules the follow-up background migration jobs' do
- described_class.new.perform
-
- ids = described_class::UntrackedFile.all.order(:id).pluck(:id)
- expect(described_class::FOLLOW_UP_MIGRATION).to be_scheduled_migration(ids.first, ids.last)
- expect(BackgroundMigrationWorker.jobs.size).to eq(1)
- end
-
- # E.g. from a previous failed run of this background migration
- context 'when there is existing data in untracked_files_for_uploads' do
- before do
- described_class.new.perform
- end
-
- it 'does not error or produce duplicates of existing data' do
- expect do
- described_class.new.perform
- end.not_to change { untracked_files_for_uploads.count }.from(5)
- end
- end
-
- # E.g. The installation is in use at the time of migration, and someone has
- # just uploaded a file
- context 'when there are files in /uploads/tmp' do
- let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
-
- before do
- FileUtils.mkdir(File.dirname(tmp_file))
- FileUtils.touch(tmp_file)
- end
-
- after do
- FileUtils.rm(tmp_file)
- end
-
- it 'does not add files from /uploads/tmp' do
- described_class.new.perform
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
- end
-
- context 'when the last batch size exactly matches the max batch size' do
- it 'does not raise error' do
- stub_const("#{described_class}::FIND_BATCH_SIZE", 5)
-
- expect do
- described_class.new.perform
- end.not_to raise_error
-
- expect(untracked_files_for_uploads.count).to eq(5)
- end
- end
- end
- end
-
- # If running on Postgres 9.2 (like on CI), this whole context is skipped
- # since we're unable to use ON CONFLICT DO NOTHING or IGNORE.
- context "test bulk insert with ON CONFLICT DO NOTHING or IGNORE", if: described_class.new.send(:can_bulk_insert_and_ignore_duplicates?) do
- it_behaves_like 'prepares the untracked_files_for_uploads table'
- end
-
- # If running on Postgres 9.2 (like on CI), the stubbed method has no effect.
- #
- # If running on Postgres 9.5+ or MySQL, then this context effectively tests
- # the bulk insert functionality without ON CONFLICT DO NOTHING or IGNORE.
- context 'test bulk insert without ON CONFLICT DO NOTHING or IGNORE' do
- before do
- allow_any_instance_of(described_class).to receive(:postgresql_pre_9_5?).and_return(true)
- end
-
- it_behaves_like 'prepares the untracked_files_for_uploads table'
- end
-
- # Very new or lightly-used installations that are running this migration
- # may not have an upload directory because they have no uploads.
- context 'when no files were ever uploaded' do
- it 'deletes the `untracked_files_for_uploads` table (and does not raise error)' do
- background_migration = described_class.new
-
- expect(background_migration).to receive(:drop_temp_table)
-
- background_migration.perform
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb b/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb
deleted file mode 100644
index 7019d5d4212..00000000000
--- a/spec/lib/gitlab/background_migration/remove_restricted_todos_spec.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::RemoveRestrictedTodos do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:users) { table(:users) }
- let(:todos) { table(:todos) }
- let(:issues) { table(:issues) }
- let(:assignees) { table(:issue_assignees) }
- let(:project_authorizations) { table(:project_authorizations) }
- let(:project_features) { table(:project_features) }
-
- let(:todo_params) { { author_id: 1, target_type: 'Issue', action: 1, state: :pending } }
-
- before do
- users.create(id: 1, email: 'user@example.com', projects_limit: 10)
- users.create(id: 2, email: 'reporter@example.com', projects_limit: 10)
- users.create(id: 3, email: 'guest@example.com', projects_limit: 10)
-
- namespace = namespaces.create(name: 'gitlab-org', path: 'gitlab-org')
- projects.create!(id: 1, name: 'project-1', path: 'project-1', visibility_level: 0, namespace_id: namespace.id)
- projects.create!(id: 2, name: 'project-2', path: 'project-2', visibility_level: 0, namespace_id: namespace.id)
-
- issues.create(id: 1, project_id: 1)
- issues.create(id: 2, project_id: 2)
-
- project_authorizations.create(user_id: 2, project_id: 2, access_level: 20) # reporter
- project_authorizations.create(user_id: 3, project_id: 2, access_level: 10) # guest
-
- todos.create(todo_params.merge(user_id: 1, project_id: 1, target_id: 1)) # out of project ids range
- todos.create(todo_params.merge(user_id: 1, project_id: 2, target_id: 2)) # non member
- todos.create(todo_params.merge(user_id: 2, project_id: 2, target_id: 2)) # reporter
- todos.create(todo_params.merge(user_id: 3, project_id: 2, target_id: 2)) # guest
- end
-
- subject { described_class.new.perform(2, 5) }
-
- context 'when a project is private' do
- it 'removes todos of users without project access' do
- expect { subject }.to change { Todo.count }.from(4).to(3)
- end
-
- context 'with a confidential issue' do
- it 'removes todos of users without project access and guests for confidential issues' do
- issues.create(id: 3, project_id: 2, confidential: true)
- issues.create(id: 4, project_id: 1, confidential: true) # not in the batch
- todos.create(todo_params.merge(user_id: 3, project_id: 2, target_id: 3))
- todos.create(todo_params.merge(user_id: 2, project_id: 2, target_id: 3))
- todos.create(todo_params.merge(user_id: 1, project_id: 1, target_id: 4))
-
- expect { subject }.to change { Todo.count }.from(7).to(5)
- end
- end
- end
-
- context 'when a project is public' do
- before do
- projects.find(2).update_attribute(:visibility_level, 20)
- end
-
- context 'when all features have the same visibility as the project, no confidential issues' do
- it 'does not remove any todos' do
- expect { subject }.not_to change { Todo.count }
- end
- end
-
- context 'with confidential issues' do
- before do
- users.create(id: 4, email: 'author@example.com', projects_limit: 10)
- users.create(id: 5, email: 'assignee@example.com', projects_limit: 10)
- issues.create(id: 3, project_id: 2, confidential: true, author_id: 4)
- assignees.create(user_id: 5, issue_id: 3)
-
- todos.create(todo_params.merge(user_id: 1, project_id: 2, target_id: 3)) # to be deleted
- todos.create(todo_params.merge(user_id: 2, project_id: 2, target_id: 3)) # authorized user
- todos.create(todo_params.merge(user_id: 3, project_id: 2, target_id: 3)) # to be deleted guest
- todos.create(todo_params.merge(user_id: 4, project_id: 2, target_id: 3)) # conf issue author
- todos.create(todo_params.merge(user_id: 5, project_id: 2, target_id: 3)) # conf issue assignee
- end
-
- it 'removes confidential issue todos for non authorized users' do
- expect { subject }.to change { Todo.count }.from(9).to(7)
- end
- end
-
- context 'features visibility restrictions' do
- before do
- todo_params.merge!(project_id: 2, user_id: 1, target_id: 3)
- todos.create(todo_params.merge(user_id: 1, target_id: 3, target_type: 'MergeRequest'))
- todos.create(todo_params.merge(user_id: 1, target_id: 3, target_type: 'Commit'))
- end
-
- context 'when issues are restricted to project members' do
- before do
- project_features.create(issues_access_level: 10, pages_access_level: 10, project_id: 2)
- end
-
- it 'removes non members issue todos' do
- expect { subject }.to change { Todo.count }.from(6).to(5)
- end
- end
-
- context 'when merge requests are restricted to project members' do
- before do
- project_features.create(merge_requests_access_level: 10, pages_access_level: 10, project_id: 2)
- end
-
- it 'removes non members issue todos' do
- expect { subject }.to change { Todo.count }.from(6).to(5)
- end
- end
-
- context 'when repository and merge requests are restricted to project members' do
- before do
- project_features.create(repository_access_level: 10, merge_requests_access_level: 10, pages_access_level: 10, project_id: 2)
- end
-
- it 'removes non members commit and merge requests todos' do
- expect { subject }.to change { Todo.count }.from(6).to(4)
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb
deleted file mode 100644
index 364edf3ed2a..00000000000
--- a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_services_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnServices do
- let(:services) { table(:services) }
-
- describe '#perform' do
- it 'migrates services where note_events is true' do
- service = services.create(confidential_note_events: nil, note_events: true)
-
- subject.perform(service.id, service.id)
-
- expect(service.reload.confidential_note_events).to eq(true)
- end
-
- it 'ignores services where note_events is false' do
- service = services.create(confidential_note_events: nil, note_events: false)
-
- subject.perform(service.id, service.id)
-
- expect(service.reload.confidential_note_events).to eq(nil)
- end
-
- it 'ignores services where confidential_note_events has already been set' do
- service = services.create(confidential_note_events: false, note_events: true)
-
- subject.perform(service.id, service.id)
-
- expect(service.reload.confidential_note_events).to eq(false)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb b/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb
deleted file mode 100644
index 28b06ac3ba3..00000000000
--- a/spec/lib/gitlab/background_migration/set_confidential_note_events_on_webhooks_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::SetConfidentialNoteEventsOnWebhooks do
- let(:web_hooks) { table(:web_hooks) }
-
- describe '#perform' do
- it 'migrates hooks where note_events is true' do
- hook = web_hooks.create(confidential_note_events: nil, note_events: true)
-
- subject.perform(hook.id, hook.id)
-
- expect(hook.reload.confidential_note_events).to eq(true)
- end
-
- it 'ignores hooks where note_events is false' do
- hook = web_hooks.create(confidential_note_events: nil, note_events: false)
-
- subject.perform(hook.id, hook.id)
-
- expect(hook.reload.confidential_note_events).to eq(nil)
- end
-
- it 'ignores hooks where confidential_note_events has already been set' do
- hook = web_hooks.create(confidential_note_events: false, note_events: true)
-
- subject.perform(hook.id, hook.id)
-
- expect(hook.reload.confidential_note_events).to eq(false)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
new file mode 100644
index 00000000000..6e9f51f510a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20200807152315 do
+ let(:merge_request_diff_files) { table(:merge_request_diff_files) }
+ let(:merge_request_diffs) { table(:merge_request_diffs) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) }
+
+ it 'fills the files_count column' do
+ empty_diff = merge_request_diffs.create!(merge_request_id: merge_request.id)
+ filled_diff = merge_request_diffs.create!(merge_request_id: merge_request.id)
+
+ 3.times do |n|
+ merge_request_diff_files.create!(
+ merge_request_diff_id: filled_diff.id,
+ relative_order: n,
+ new_file: false,
+ renamed_file: false,
+ deleted_file: false,
+ too_large: false,
+ a_mode: '',
+ b_mode: '',
+ old_path: '',
+ new_path: ''
+ )
+ end
+
+ described_class.new.perform(empty_diff.id, filled_diff.id)
+
+ expect(empty_diff.reload.files_count).to eq(0)
+ expect(filled_diff.reload.files_count).to eq(3)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb b/spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb
new file mode 100644
index 00000000000..6079ad2dd2a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# The test setup must begin before
+# 20200804041930_add_not_null_constraint_on_external_diff_store_to_merge_request_diffs.rb
+# has run, or else we cannot insert a row with `NULL` `external_diff_store` to
+# test against.
+RSpec.describe Gitlab::BackgroundMigration::SetNullExternalDiffStoreToLocalValue, schema: 20200804035230 do
+ let!(:merge_request_diffs) { table(:merge_request_diffs) }
+ let!(:merge_requests) { table(:merge_requests) }
+ let!(:namespaces) { table(:namespaces) }
+ let!(:projects) { table(:projects) }
+ let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) }
+
+ it 'correctly migrates nil external_diff_store to 1' do
+ external_diff_store_1 = merge_request_diffs.create!(external_diff_store: 1, merge_request_id: merge_request.id)
+ external_diff_store_2 = merge_request_diffs.create!(external_diff_store: 2, merge_request_id: merge_request.id)
+ external_diff_store_nil = merge_request_diffs.create!(external_diff_store: nil, merge_request_id: merge_request.id)
+
+ described_class.new.perform(external_diff_store_1.id, external_diff_store_nil.id)
+
+ external_diff_store_1.reload
+ external_diff_store_2.reload
+ external_diff_store_nil.reload
+
+ expect(external_diff_store_1.external_diff_store).to eq(1) # unchanged
+ expect(external_diff_store_2.external_diff_store).to eq(2) # unchanged
+ expect(external_diff_store_nil.external_diff_store).to eq(1) # nil => 1
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb b/spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb
new file mode 100644
index 00000000000..40d41262fc7
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# The test setup must begin before
+# 20200806004742_add_not_null_constraint_on_file_store_to_package_files.rb
+# has run, or else we cannot insert a row with `NULL` `file_store` to
+# test against.
+RSpec.describe Gitlab::BackgroundMigration::SetNullPackageFilesFileStoreToLocalValue, schema: 20200806004232 do
+ let!(:packages_package_files) { table(:packages_package_files) }
+ let!(:packages_packages) { table(:packages_packages) }
+ let!(:projects) { table(:projects) }
+ let!(:namespaces) { table(:namespaces) }
+ let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:package) { packages_packages.create!(project_id: project.id, name: 'bar', package_type: 1) }
+
+ it 'correctly migrates nil file_store to 1' do
+ file_store_1 = packages_package_files.create!(file_store: 1, file_name: 'foo_1', file: 'foo_1', package_id: package.id)
+ file_store_2 = packages_package_files.create!(file_store: 2, file_name: 'foo_2', file: 'foo_2', package_id: package.id)
+ file_store_nil = packages_package_files.create!(file_store: nil, file_name: 'foo_nil', file: 'foo_nil', package_id: package.id)
+
+ described_class.new.perform(file_store_1.id, file_store_nil.id)
+
+ file_store_1.reload
+ file_store_2.reload
+ file_store_nil.reload
+
+ expect(file_store_1.file_store).to eq(1) # unchanged
+ expect(file_store_2.file_store).to eq(2) # unchanged
+ expect(file_store_nil.file_store).to eq(1) # nil => 1
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
index 08a4bbe38ac..392b44d1a1f 100644
--- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
+++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb
@@ -75,6 +75,14 @@ RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMent
let(:resource) { merge_request }
it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, MergeRequest
+
+ context 'when the migrate_user_mentions feature flag is disabled' do
+ before do
+ stub_feature_flags(migrate_user_mentions: false)
+ end
+
+ it_behaves_like 'resource migration not run', MigrateMergeRequestMentionsToDb, MergeRequest
+ end
end
context 'migrate commit mentions' do
@@ -96,6 +104,14 @@ RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMent
let(:resource) { commit }
it_behaves_like 'resource notes mentions migration', MigrateCommitNotesMentionsToDb, Commit
+
+ context 'when the migrate_user_mentions feature flag is disabled' do
+ before do
+ stub_feature_flags(migrate_user_mentions: false)
+ end
+
+ it_behaves_like 'resource notes migration not run', MigrateCommitNotesMentionsToDb, Commit
+ end
end
end
diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb
index b110fa484ff..052a01a8dd8 100644
--- a/spec/lib/gitlab/background_migration_spec.rb
+++ b/spec/lib/gitlab/background_migration_spec.rb
@@ -130,6 +130,7 @@ RSpec.describe Gitlab::BackgroundMigration do
let(:retry_queue) do
[double(args: ['Object', [3]], queue: described_class.queue, delete: true)]
end
+
let(:dead_queue) do
[double(args: ['Object', [4]], queue: described_class.queue, delete: true)]
end
diff --git a/spec/lib/gitlab/badge/coverage/report_spec.rb b/spec/lib/gitlab/badge/coverage/report_spec.rb
index 9c4dfcbfd54..4a9508712a4 100644
--- a/spec/lib/gitlab/badge/coverage/report_spec.rb
+++ b/spec/lib/gitlab/badge/coverage/report_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Gitlab::Badge::Coverage::Report do
create(:ci_pipeline, opts).tap do |pipeline|
yield pipeline
- pipeline.update_legacy_status
+ ::Ci::ProcessPipelineService.new(pipeline).execute
end
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 08b7bafddf0..d4483bf1754 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -87,6 +87,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
values: sample_issues_statuses
}
end
+
let(:counter) { double('counter', increment: true) }
subject { described_class.new(project) }
@@ -109,6 +110,7 @@ RSpec.describe Gitlab::BitbucketImport::Importer do
created_at: Time.now,
updated_at: Time.now)
end
+
let(:author_line) { "*Created by: someuser*\n\n" }
before do
diff --git a/spec/lib/gitlab/build_access_spec.rb b/spec/lib/gitlab/build_access_spec.rb
index c6248f94772..4a1c172a975 100644
--- a/spec/lib/gitlab/build_access_spec.rb
+++ b/spec/lib/gitlab/build_access_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::BuildAccess do
let(:project) { create(:project) }
describe '#can_do_action' do
- subject { described_class.new(user, project: project).can_do_action?(:download_code) }
+ subject { described_class.new(user, container: project).can_do_action?(:download_code) }
context 'when the user can do an action on the project but cannot access git' do
before do
diff --git a/spec/lib/gitlab/checks/change_access_spec.rb b/spec/lib/gitlab/checks/change_access_spec.rb
index 87936d19239..6f82dabb285 100644
--- a/spec/lib/gitlab/checks/change_access_spec.rb
+++ b/spec/lib/gitlab/checks/change_access_spec.rb
@@ -3,14 +3,14 @@
require 'spec_helper'
RSpec.describe Gitlab::Checks::ChangeAccess do
- describe '#exec' do
+ describe '#validate!' do
include_context 'change access checks context'
subject { change_access }
context 'without failed checks' do
it "doesn't raise an error" do
- expect { subject.exec }.not_to raise_error
+ expect { subject.validate! }.not_to raise_error
end
it 'calls pushes checks' do
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Checks::ChangeAccess do
expect(instance).to receive(:validate!)
end
- subject.exec
+ subject.validate!
end
it 'calls branches checks' do
@@ -26,7 +26,7 @@ RSpec.describe Gitlab::Checks::ChangeAccess do
expect(instance).to receive(:validate!)
end
- subject.exec
+ subject.validate!
end
it 'calls tags checks' do
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::Checks::ChangeAccess do
expect(instance).to receive(:validate!)
end
- subject.exec
+ subject.validate!
end
it 'calls lfs checks' do
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Checks::ChangeAccess do
expect(instance).to receive(:validate!)
end
- subject.exec
+ subject.validate!
end
it 'calls diff checks' do
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::Checks::ChangeAccess do
expect(instance).to receive(:validate!)
end
- subject.exec
+ subject.validate!
end
end
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::Checks::ChangeAccess do
protocol: protocol,
logger: logger)
- expect { access.exec }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
+ expect { access.validate! }.to raise_error(Gitlab::Checks::TimedLogger::TimeoutError)
end
end
end
diff --git a/spec/lib/gitlab/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index f29a39e4e66..bf1f2bae7da 100644
--- a/spec/lib/gitlab/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -213,6 +213,7 @@ RSpec.describe Gitlab::Ci::Ansi2html do
" data-timestamp=\"#{section_start_time.to_i}\" data-section=\"#{class_name(section_name)}\"" \
' role="button"></div>'
end
+
let(:section_end_html) do
"<div class=\"section-end\" data-section=\"#{class_name(section_name)}\"></div>"
end
diff --git a/spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb b/spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb
new file mode 100644
index 00000000000..0e26a9fa571
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/artifacts/expire_in_parser_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Build::Artifacts::ExpireInParser do
+ describe '.validate_duration' do
+ subject { described_class.validate_duration(value) }
+
+ context 'with never' do
+ let(:value) { 'never' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with never value camelized' do
+ let(:value) { 'Never' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with a duration' do
+ let(:value) { '1 Day' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'without a duration' do
+ let(:value) { 'something' }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
+ describe '#seconds_from_now' do
+ subject { described_class.new(value).seconds_from_now }
+
+ context 'with never' do
+ let(:value) { 'never' }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with an empty string' do
+ let(:value) { '' }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with a duration' do
+ let(:value) { '1 day' }
+
+ it { is_expected.to be_like_time(1.day.from_now) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/auto_retry_spec.rb b/spec/lib/gitlab/ci/build/auto_retry_spec.rb
new file mode 100644
index 00000000000..cfa8c9cd938
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/auto_retry_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Build::AutoRetry do
+ let(:auto_retry) { described_class.new(build) }
+
+ describe '#allowed?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:build) { create(:ci_build) }
+
+ subject { auto_retry.allowed? }
+
+ where(:description, :retry_count, :options, :failure_reason, :result) do
+ "retries are disabled" | 0 | { max: 0 } | nil | false
+ "max equals count" | 2 | { max: 2 } | nil | false
+ "max is higher than count" | 1 | { max: 2 } | nil | true
+ "max is a string" | 1 | { max: '2' } | nil | true
+ "matching failure reason" | 0 | { when: %w[api_failure], max: 2 } | :api_failure | true
+ "not matching with always" | 0 | { when: %w[always], max: 2 } | :api_failure | true
+ "not matching reason" | 0 | { when: %w[script_error], max: 2 } | :api_failure | false
+ "scheduler failure override" | 1 | { when: %w[scheduler_failure], max: 1 } | :scheduler_failure | false
+ "default for scheduler failure" | 1 | {} | :scheduler_failure | true
+ end
+
+ with_them do
+ before do
+ allow(build).to receive(:retries_count) { retry_count }
+
+ build.options[:retry] = options
+ build.failure_reason = failure_reason
+ allow(build).to receive(:retryable?).and_return(true)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+
+ context 'when build is not retryable' do
+ before do
+ allow(build).to receive(:retryable?).and_return(false)
+ end
+
+ specify { expect(subject).to eq(false) }
+ end
+ end
+
+ describe '#options_retry_max' do
+ subject(:result) { auto_retry.send(:options_retry_max) }
+
+ context 'with retries max config option' do
+ let(:build) { create(:ci_build, options: { retry: { max: 1 } }) }
+
+ context 'when build_metadata_config is set' do
+ before do
+ stub_feature_flags(ci_build_metadata_config: true)
+ end
+
+ it 'returns the number of configured max retries' do
+ expect(result).to eq 1
+ end
+ end
+
+ context 'when build_metadata_config is not set' do
+ before do
+ stub_feature_flags(ci_build_metadata_config: false)
+ end
+
+ it 'returns the number of configured max retries' do
+ expect(result).to eq 1
+ end
+ end
+ end
+
+ context 'without retries max config option' do
+ let(:build) { create(:ci_build) }
+
+ it 'returns nil' do
+ expect(result).to be_nil
+ end
+ end
+
+ context 'when build is degenerated' do
+ let(:build) { create(:ci_build, :degenerated) }
+
+ it 'returns nil' do
+ expect(result).to be_nil
+ end
+ end
+
+ context 'with integer only config option' do
+ let(:build) { create(:ci_build, options: { retry: 1 }) }
+
+ it 'returns the number of configured max retries' do
+ expect(result).to eq 1
+ end
+ end
+ end
+
+ describe '#options_retry_when' do
+ subject(:result) { auto_retry.send(:options_retry_when) }
+
+ context 'with retries when config option' do
+ let(:build) { create(:ci_build, options: { retry: { when: ['some_reason'] } }) }
+
+ it 'returns the configured when' do
+ expect(result).to eq ['some_reason']
+ end
+ end
+
+ context 'without retries when config option' do
+ let(:build) { create(:ci_build) }
+
+ it 'returns always array' do
+ expect(result).to eq ['always']
+ end
+ end
+
+ context 'with integer only config option' do
+ let(:build) { create(:ci_build, options: { retry: 1 }) }
+
+ it 'returns always array' do
+ expect(result).to eq ['always']
+ end
+ end
+ end
+end
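
For reference, the table rows above encode a single retry decision: a build is retried while its retry count is below the configured (or default) maximum and the failure reason matches the configured `when` list. A minimal plain-Ruby sketch of that rule, inferred from these expectations (the default of two retries for `scheduler_failure` is an assumption; this is not GitLab's AutoRetry implementation):

# Sketch only: approximates the decision table in auto_retry_spec.rb above.
# DEFAULT_RETRIES is assumed from the last two table rows.
DEFAULT_RETRIES = { scheduler_failure: 2 }.freeze

def retry_allowed?(retryable:, retries_count:, retry_options:, failure_reason:)
  return false unless retryable

  max = retry_options[:max] || DEFAULT_RETRIES.fetch(failure_reason, 0)
  reasons = Array(retry_options[:when] || 'always').map(&:to_s)

  retries_count < max.to_i &&
    (reasons.include?('always') || reasons.include?(failure_reason.to_s))
end

retry_allowed?(retryable: true, retries_count: 1, retry_options: { max: '2' }, failure_reason: nil)
# => true ("max is a string")
retry_allowed?(retryable: true, retries_count: 0, retry_options: { when: %w[script_error], max: 2 }, failure_reason: :api_failure)
# => false ("not matching reason")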
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index 180c52ee1ab..ca02eaee0a0 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
%i[before_script script stage type after_script cache
image services only except rules needs variables artifacts
environment coverage retry interruptible timeout release tags
- inherit]
+ inherit parallel]
end
it { is_expected.to include(*result) }
@@ -73,6 +73,45 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it { is_expected.to be_falsey }
end
+
+ context 'when config does not contain script' do
+ let(:name) { :build }
+
+ let(:config) do
+ { before_script: "cd ${PROJ_DIR} " }
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when using the default job without script' do
+ let(:name) { :default }
+ let(:config) do
+ { before_script: "cd ${PROJ_DIR} " }
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when using the default job with script' do
+ let(:name) { :default }
+ let(:config) do
+ {
+ before_script: "cd ${PROJ_DIR} ",
+ script: "ls"
+ }
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when there are no shared keys between jobs and bridges' do
+ subject(:shared_values) do
+ described_class::ALLOWED_KEYS & Gitlab::Ci::Config::Entry::Bridge::ALLOWED_KEYS
+ end
+
+ it { is_expected.to be_empty }
+ end
end
describe 'validations' do
@@ -202,56 +241,47 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
context 'when parallel value is not correct' do
context 'when it is not a numeric value' do
- let(:config) { { parallel: true } }
+ let(:config) { { script: 'echo', parallel: true } }
it 'returns error about invalid type' do
expect(entry).not_to be_valid
- expect(entry.errors).to include 'job parallel is not a number'
+ expect(entry.errors).to include 'parallel should be an integer or a hash'
end
end
context 'when it is lower than two' do
- let(:config) { { parallel: 1 } }
+ let(:config) { { script: 'echo', parallel: 1 } }
it 'returns error about value too low' do
expect(entry).not_to be_valid
expect(entry.errors)
- .to include 'job parallel must be greater than or equal to 2'
+ .to include 'parallel config must be greater than or equal to 2'
end
end
- context 'when it is bigger than 50' do
- let(:config) { { parallel: 51 } }
+ context 'when it is an empty hash' do
+ let(:config) { { script: 'echo', parallel: {} } }
- it 'returns error about value too high' do
+ it 'returns error about missing matrix' do
expect(entry).not_to be_valid
expect(entry.errors)
- .to include 'job parallel must be less than or equal to 50'
+ .to include 'parallel config missing required keys: matrix'
end
end
+ end
- context 'when it is not an integer' do
- let(:config) { { parallel: 1.5 } }
-
- it 'returns error about wrong value' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'job parallel must be an integer'
- end
+ context 'when it uses both "when:" and "rules:"' do
+ let(:config) do
+ {
+ script: 'echo',
+ when: 'on_failure',
+ rules: [{ if: '$VARIABLE', when: 'on_success' }]
+ }
end
- context 'when it uses both "when:" and "rules:"' do
- let(:config) do
- {
- script: 'echo',
- when: 'on_failure',
- rules: [{ if: '$VARIABLE', when: 'on_success' }]
- }
- end
-
- it 'returns an error about when: being combined with rules' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include 'job config key may not be used with `rules`: when'
- end
+ it 'returns an error about when: being combined with rules' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include 'job config key may not be used with `rules`: when'
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index fdf6008f89f..ac8dd2a3267 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -230,6 +230,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
+ shared_examples 'has no warnings' do
+ it 'does not raise the warning' do
+ it 'does not report a warning' do
+ end
+ end
+
context 'when workflow rules is used' do
let(:workflow) { double('workflow', 'has_rules?' => true) }
@@ -254,6 +260,86 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
+ context 'when workflow rules is not used' do
+ let(:workflow) { double('workflow', 'has_rules?' => false) }
+ let(:feature_flag_value) { true }
+
+ before do
+ stub_feature_flags(ci_raise_job_rules_without_workflow_rules_warning: feature_flag_value)
+ entry.compose!(deps)
+ end
+
+ context 'when rules are valid' do
+ let(:config) do
+ {
+ script: 'ls',
+ rules: [
+ { if: '$CI_COMMIT_BRANCH', when: 'on_success' },
+ last_rule
+ ]
+ }
+ end
+
+ context 'when last rule contains only `when`' do
+ let(:last_rule) { { when: when_value } }
+
+ context 'and its value is not `never`' do
+ let(:when_value) { 'on_success' }
+
+ it 'raises a warning' do
+ expect(entry.warnings).to contain_exactly(/may allow multiple pipelines/)
+ end
+
+ context 'when feature flag is disabled' do
+ let(:feature_flag_value) { false }
+
+ it_behaves_like 'has no warnings'
+ end
+ end
+
+ context 'and its value is `never`' do
+ let(:when_value) { 'never' }
+
+ it_behaves_like 'has no warnings'
+ end
+ end
+
+ context 'when last rule does not contain only `when`' do
+ let(:last_rule) { { if: '$CI_MERGE_REQUEST_ID', when: 'always' } }
+
+ it_behaves_like 'has no warnings'
+ end
+ end
+
+ context 'when rules are invalid' do
+ let(:config) { { script: 'ls', rules: { when: 'always' } } }
+
+ it_behaves_like 'has no warnings'
+ end
+ end
+
+ context 'when workflow rules is used' do
+ let(:workflow) { double('workflow', 'has_rules?' => true) }
+
+ before do
+ entry.compose!(deps)
+ end
+
+ context 'when last rule contains only `when`' do
+ let(:config) do
+ {
+ script: 'ls',
+ rules: [
+ { if: '$CI_COMMIT_BRANCH', when: 'on_success' },
+ { when: 'always' }
+ ]
+ }
+ end
+
+ it_behaves_like 'has no warnings'
+ end
+ end
+
context 'with inheritance' do
context 'of variables' do
let(:config) do
diff --git a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
new file mode 100644
index 00000000000..39697884e3b
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
@@ -0,0 +1,188 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_dependency 'active_model'
+
+RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
+ subject(:matrix) { described_class.new(config) }
+
+ describe 'validations' do
+ before do
+ matrix.compose!
+ end
+
+ context 'when entry config value is correct' do
+ let(:config) do
+ [
+ { 'VAR_1' => [1, 2, 3], 'VAR_2' => [4, 5, 6] },
+ { 'VAR_3' => %w[a b], 'VAR_4' => %w[c d] }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+ end
+
+ context 'when entry config generates too many jobs' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => (1..10).to_a,
+ 'VAR_2' => (11..20).to_a
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about too many jobs' do
+ expect(matrix.errors)
+ .to include('matrix config generates too many jobs (maximum is 50)')
+ end
+ end
+ end
+
+ context 'when entry config has only one variable' do
+ let(:config) do
+ [
+ {
+ 'VAR_1' => %w[test]
+ }
+ ]
+ end
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about requiring at least two items' do
+ expect(matrix.errors)
+ .to include('variables config requires at least 2 items')
+ end
+ end
+
+ describe '#value' do
+ before do
+ matrix.compose!
+ end
+
+ it 'returns the value without raising an error' do
+ expect(matrix.value).to eq([{ 'VAR_1' => ['test'] }])
+ end
+ end
+ end
+
+ context 'when config value has wrong type' do
+ let(:config) { {} }
+
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about incorrect type' do
+ expect(matrix.errors)
+ .to include('matrix config should be an array of hashes')
+ end
+ end
+ end
+ end
+
+ describe '.compose!' do
+ context 'when valid job entries composed' do
+ let(:config) do
+ [
+ { PROVIDER: 'aws', STACK: %w[monitoring app1 app2] },
+ { STACK: %w[monitoring backup app], PROVIDER: 'ovh' },
+ { PROVIDER: 'gcp', STACK: %w[data processing], ARGS: 'normal' },
+ { PROVIDER: 'vultr', STACK: 'data', ARGS: 'store' }
+ ]
+ end
+
+ before do
+ matrix.compose!
+ end
+
+ describe '#value' do
+ it 'returns key value' do
+ expect(matrix.value).to match(
+ [
+ { 'PROVIDER' => %w[aws], 'STACK' => %w[monitoring app1 app2] },
+ { 'PROVIDER' => %w[ovh], 'STACK' => %w[monitoring backup app] },
+ { 'ARGS' => %w[normal], 'PROVIDER' => %w[gcp], 'STACK' => %w[data processing] },
+ { 'ARGS' => %w[store], 'PROVIDER' => %w[vultr], 'STACK' => %w[data] }
+ ]
+ )
+ end
+ end
+
+ describe '#descendants' do
+ it 'creates valid descendant nodes' do
+ expect(matrix.descendants.count).to eq(config.size)
+ expect(matrix.descendants)
+ .to all(be_an_instance_of(::Gitlab::Ci::Config::Entry::Product::Variables))
+ end
+ end
+ end
+
+ context 'with empty config' do
+ let(:config) { [] }
+
+ before do
+ matrix.compose!
+ end
+
+ describe '#value' do
+ it 'returns empty value' do
+ expect(matrix.value).to eq([])
+ end
+ end
+ end
+ end
+
+ describe '#number_of_generated_jobs' do
+ before do
+ matrix.compose!
+ end
+
+ subject { matrix.number_of_generated_jobs }
+
+ context 'with empty config' do
+ let(:config) { [] }
+
+ it { is_expected.to be_zero }
+ end
+
+ context 'with only one variable' do
+ let(:config) do
+ [{ 'VAR_1' => (1..10).to_a }]
+ end
+
+ it { is_expected.to eq(10) }
+ end
+
+ context 'with two variables' do
+ let(:config) do
+ [{ 'VAR_1' => (1..10).to_a, 'VAR_2' => (1..5).to_a }]
+ end
+
+ it { is_expected.to eq(50) }
+ end
+
+ context 'with two sets of variables' do
+ let(:config) do
+ [
+ { 'VAR_1' => (1..10).to_a, 'VAR_2' => (1..5).to_a },
+ { 'VAR_3' => (1..2).to_a, 'VAR_4' => (1..3).to_a }
+ ]
+ end
+
+ it { is_expected.to eq(56) }
+ end
+ end
+end
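
For reference, the counts asserted in `#number_of_generated_jobs` above follow from a cartesian product per matrix entry, summed across entries. A self-contained sketch of that arithmetic (illustrative only, not the entry class itself):

# Sketch: jobs generated by a `parallel: matrix:` config.
# Each entry multiplies the sizes of its value lists; entries are summed.
def number_of_generated_jobs(matrix)
  matrix.sum do |entry|
    entry.values.map { |values| Array(values).size }.inject(1, :*)
  end
end

number_of_generated_jobs([{ 'VAR_1' => (1..10).to_a, 'VAR_2' => (1..5).to_a }]) # => 50
number_of_generated_jobs(
  [
    { 'VAR_1' => (1..10).to_a, 'VAR_2' => (1..5).to_a },
    { 'VAR_3' => (1..2).to_a, 'VAR_4' => (1..3).to_a }
  ]
) # => 56, i.e. 10 * 5 + 2 * 3, while 10 * 10 in a single entry exceeds the 50-job limit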
diff --git a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
new file mode 100644
index 00000000000..bc09e20d748
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_dependency 'active_model'
+
+RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Parallel do
+ subject(:parallel) { described_class.new(config) }
+
+ context 'with invalid config' do
+ shared_examples 'invalid config' do |error_message|
+ describe '#valid?' do
+ it { is_expected.not_to be_valid }
+ end
+
+ describe '#errors' do
+ it 'returns error about invalid type' do
+ expect(parallel.errors).to match(a_collection_including(error_message))
+ end
+ end
+ end
+
+ context 'when it is not a numeric value' do
+ let(:config) { true }
+
+ it_behaves_like 'invalid config', /should be an integer or a hash/
+ end
+
+ context 'when it is lower than two' do
+ let(:config) { 1 }
+
+ it_behaves_like 'invalid config', /must be greater than or equal to 2/
+ end
+
+ context 'when it is bigger than 50' do
+ let(:config) { 51 }
+
+ it_behaves_like 'invalid config', /must be less than or equal to 50/
+ end
+
+ context 'when it is not an integer' do
+ let(:config) { 1.5 }
+
+ it_behaves_like 'invalid config', /must be an integer/
+ end
+
+ context 'with empty hash config' do
+ let(:config) { {} }
+
+ it_behaves_like 'invalid config', /matrix builds config missing required keys: matrix/
+ end
+ end
+
+ context 'with numeric config' do
+ context 'when job is specified' do
+ let(:config) { 2 }
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns the parallel number configuration' do
+ expect(parallel.value).to match(number: config)
+ end
+ end
+ end
+ end
+
+ context 'with matrix builds config' do
+ context 'when matrix is specified' do
+ let(:config) do
+ {
+ matrix: [
+ { PROVIDER: 'aws', STACK: %w[monitoring app1 app2] },
+ { PROVIDER: 'gcp', STACK: %w[data processing] }
+ ]
+ }
+ end
+
+ describe '#valid?' do
+ it { is_expected.to be_valid }
+ end
+
+ describe '#value' do
+ it 'returns the parallel matrix configuration' do
+ expect(parallel.value).to match(matrix: [
+ { PROVIDER: 'aws', STACK: %w[monitoring app1 app2] },
+ { PROVIDER: 'gcp', STACK: %w[data processing] }
+ ])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
new file mode 100644
index 00000000000..230b001d620
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/product/variables_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_dependency 'active_model'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Product::Variables do
+ let(:entry) { described_class.new(config) }
+
+ describe 'validations' do
+ context 'when entry config value is correct' do
+ let(:config) do
+ {
+ 'VARIABLE_1' => 1,
+ 'VARIABLE_2' => 'value 2',
+ 'VARIABLE_3' => :value_3,
+ :VARIABLE_4 => 'value 4',
+ 5 => ['value 5'],
+ 'VARIABLE_6' => ['value 6']
+ }
+ end
+
+ describe '#value' do
+ it 'returns hash with key value strings' do
+ expect(entry.value).to match({
+ 'VARIABLE_1' => ['1'],
+ 'VARIABLE_2' => ['value 2'],
+ 'VARIABLE_3' => ['value_3'],
+ 'VARIABLE_4' => ['value 4'],
+ '5' => ['value 5'],
+ 'VARIABLE_6' => ['value 6']
+ })
+ end
+ end
+
+ describe '#errors' do
+ it 'does not append errors' do
+ expect(entry.errors).to be_empty
+ end
+ end
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ shared_examples 'invalid variables' do |message|
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors).to include(message)
+ end
+ end
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+
+ context 'with array' do
+ let(:config) { [:VAR, 'test'] }
+
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
+
+ context 'with empty array' do
+ let(:config) { { VAR: 'test', VAR2: [] } }
+
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
+
+ context 'with nested array' do
+ let(:config) { { VAR: 'test', VAR2: [1, [2]] } }
+
+ it_behaves_like 'invalid variables', /should be a hash of key value pairs/
+ end
+
+ context 'with only one variable' do
+ let(:config) { { VAR: 'test' } }
+
+ it_behaves_like 'invalid variables', /variables config requires at least 2 items/
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 9fbc14c19b9..ec137ef2ae4 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -95,6 +95,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Service do
let(:config) do
{ name: 'postgresql:9.5', alias: 'db', command: %w(cmd run), entrypoint: %w(/bin/sh run), ports: ports }
end
+
let(:entry) { described_class.new(config, { with_image_ports: image_ports }) }
let(:image_ports) { false }
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 993a07568de..fdd29afe2d6 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -92,6 +92,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
- bundle install --jobs $(nproc) "${FLAGS[@]}"
HEREDOC
end
+
let(:location) { '/lib/gitlab/ci/templates/existent-file.yml' }
before do
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index b2cf36b2597..9786e050399 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -128,6 +128,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
remote_file
]
end
+
let(:values) do
{
include: external_files,
diff --git a/spec/lib/gitlab/ci/config/normalizer/factory_spec.rb b/spec/lib/gitlab/ci/config/normalizer/factory_spec.rb
new file mode 100644
index 00000000000..e355740222f
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/normalizer/factory_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Normalizer::Factory do
+ describe '#create' do
+ context 'when no strategy applies' do
+ subject(:subject) { described_class.new(nil, nil).create } # rubocop:disable Rails/SaveBang
+
+ it { is_expected.to be_empty }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
new file mode 100644
index 00000000000..bab604c4504
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
@@ -0,0 +1,102 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
+ describe '.applies_to?' do
+ subject { described_class.applies_to?(config) }
+
+ context 'with hash that has :matrix key' do
+ let(:config) { { matrix: [] } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with hash that does not have :matrix key' do
+ let(:config) { { number: [] } }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'with a number' do
+ let(:config) { 5 }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.build_from' do
+ subject { described_class.build_from('test', config) }
+
+ let(:config) do
+ {
+ matrix: [
+ { 'PROVIDER' => %w[aws], 'STACK' => %w[app1 app2] },
+ { 'PROVIDER' => %w[ovh gcp], 'STACK' => %w[app] }
+ ]
+ }
+ end
+
+ it { expect(subject.size).to eq(4) }
+
+ it 'has attributes' do
+ expect(subject.map(&:attributes)).to match_array(
+ [
+ {
+ name: 'test 1/4',
+ instance: 1,
+ parallel: { total: 4 },
+ variables: {
+ 'PROVIDER' => 'aws',
+ 'STACK' => 'app1'
+ }
+ },
+ {
+ name: 'test 2/4',
+ instance: 2,
+ parallel: { total: 4 },
+ variables: {
+ 'PROVIDER' => 'aws',
+ 'STACK' => 'app2'
+ }
+ },
+ {
+ name: 'test 3/4',
+ instance: 3,
+ parallel: { total: 4 },
+ variables: {
+ 'PROVIDER' => 'ovh',
+ 'STACK' => 'app'
+ }
+ },
+ {
+ name: 'test 4/4',
+ instance: 4,
+ parallel: { total: 4 },
+ variables: {
+ 'PROVIDER' => 'gcp',
+ 'STACK' => 'app'
+ }
+ }
+ ]
+ )
+ end
+
+ it 'has parallelized name' do
+ expect(subject.map(&:name)).to match_array(
+ ['test 1/4', 'test 2/4', 'test 3/4', 'test 4/4']
+ )
+ end
+
+ it 'has details' do
+ expect(subject.map(&:name_with_details)).to match_array(
+ [
+ 'test (PROVIDER=aws; STACK=app1)',
+ 'test (PROVIDER=aws; STACK=app2)',
+ 'test (PROVIDER=gcp; STACK=app)',
+ 'test (PROVIDER=ovh; STACK=app)'
+ ]
+ )
+ end
+ end
+end
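
For reference, the attributes expected from `.build_from` above amount to taking the cartesian product of each matrix entry and numbering the combinations. A plain-Ruby sketch of that expansion (an illustration of the expected output, not the MatrixStrategy code):

# Sketch: expand matrix entries into per-job names and variable sets,
# matching the expectations in matrix_strategy_spec.rb above.
def expand_matrix(job_name, matrix)
  combos = matrix.flat_map do |entry|
    keys = entry.keys
    first, *rest = entry.values.map { |values| Array(values) }
    first.product(*rest).map { |combo| keys.zip(combo).to_h }
  end

  combos.each_with_index.map do |variables, index|
    {
      name: "#{job_name} #{index + 1}/#{combos.size}",
      instance: index + 1,
      parallel: { total: combos.size },
      variables: variables
    }
  end
end

expand_matrix('test', [
  { 'PROVIDER' => %w[aws], 'STACK' => %w[app1 app2] },
  { 'PROVIDER' => %w[ovh gcp], 'STACK' => %w[app] }
]).map { |job| job[:name] }
# => ["test 1/4", "test 2/4", "test 3/4", "test 4/4"]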
diff --git a/spec/lib/gitlab/ci/config/normalizer/number_strategy_spec.rb b/spec/lib/gitlab/ci/config/normalizer/number_strategy_spec.rb
new file mode 100644
index 00000000000..06f47fe11c6
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/normalizer/number_strategy_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Normalizer::NumberStrategy do
+ describe '.applies_to?' do
+ subject { described_class.applies_to?(config) }
+
+ context 'with numbers' do
+ let(:config) { 5 }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with hash that has :number key' do
+ let(:config) { { number: 5 } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with a float number' do
+ let(:config) { 5.5 }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'with hash that does not have :number key' do
+ let(:config) { { matrix: 5 } }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '.build_from' do
+ subject { described_class.build_from('test', config) }
+
+ shared_examples 'parallelized job' do
+ it { expect(subject.size).to eq(3) }
+
+ it 'has attributes' do
+ expect(subject.map(&:attributes)).to match_array(
+ [
+ { name: 'test 1/3', instance: 1, parallel: { total: 3 } },
+ { name: 'test 2/3', instance: 2, parallel: { total: 3 } },
+ { name: 'test 3/3', instance: 3, parallel: { total: 3 } }
+ ]
+ )
+ end
+
+ it 'has parallelized name' do
+ expect(subject.map(&:name)).to match_array(
+ ['test 1/3', 'test 2/3', 'test 3/3'])
+ end
+ end
+
+ context 'with numbers' do
+ let(:config) { 3 }
+
+ it_behaves_like 'parallelized job'
+ end
+
+ context 'with hash that has :number key' do
+ let(:config) { { number: 3 } }
+
+ it_behaves_like 'parallelized job'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index d3d165ba00f..949af8cdc4c 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -4,66 +4,13 @@ require 'fast_spec_helper'
RSpec.describe Gitlab::Ci::Config::Normalizer do
let(:job_name) { :rspec }
- let(:job_config) { { script: 'rspec', parallel: 5, name: 'rspec' } }
+ let(:job_config) { { script: 'rspec', parallel: parallel_config, name: 'rspec', variables: variables_config } }
let(:config) { { job_name => job_config } }
- let(:expanded_job_names) do
- [
- "rspec 1/5",
- "rspec 2/5",
- "rspec 3/5",
- "rspec 4/5",
- "rspec 5/5"
- ]
- end
-
describe '.normalize_jobs' do
subject { described_class.new(config).normalize_jobs }
- it 'does not have original job' do
- is_expected.not_to include(job_name)
- end
-
- it 'has parallelized jobs' do
- is_expected.to include(*expanded_job_names.map(&:to_sym))
- end
-
- it 'sets job instance in options' do
- expect(subject.values).to all(include(:instance))
- end
-
- it 'parallelizes jobs with original config' do
- original_config = config[job_name].except(:name)
- configs = subject.values.map { |config| config.except(:name, :instance) }
-
- expect(configs).to all(eq(original_config))
- end
-
- context 'when the job is not parallelized' do
- let(:job_config) { { script: 'rspec', name: 'rspec' } }
-
- it 'returns the same hash' do
- is_expected.to eq(config)
- end
- end
-
- context 'when there is a job with a slash in it' do
- let(:job_name) { :"rspec 35/2" }
-
- it 'properly parallelizes job names' do
- job_names = [
- :"rspec 35/2 1/5",
- :"rspec 35/2 2/5",
- :"rspec 35/2 3/5",
- :"rspec 35/2 4/5",
- :"rspec 35/2 5/5"
- ]
-
- is_expected.to include(*job_names)
- end
- end
-
- context 'for dependencies' do
+ shared_examples 'parallel dependencies' do
context "when job has dependencies on parallelized jobs" do
let(:config) do
{
@@ -91,9 +38,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
end
it "parallelizes dependencies" do
- job_names = ["rspec 1/5", "rspec 2/5", "rspec 3/5", "rspec 4/5", "rspec 5/5"]
-
- expect(subject[:final_job][:dependencies]).to include(*job_names)
+ expect(subject[:final_job][:dependencies]).to include(*expanded_job_names)
end
it "includes the regular job in dependencies" do
@@ -102,14 +47,14 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
end
end
- context 'for needs' do
+ shared_examples 'parallel needs' do
let(:expanded_job_attributes) do
expanded_job_names.map do |job_name|
{ name: job_name, extra: :key }
end
end
- context "when job has needs on parallelized jobs" do
+ context 'when job has needs on parallelized jobs' do
let(:config) do
{
job_name => job_config,
@@ -124,12 +69,12 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
}
end
- it "parallelizes needs" do
+ it 'parallelizes needs' do
expect(subject.dig(:other_job, :needs, :job)).to eq(expanded_job_attributes)
end
end
- context "when there are dependencies which are both parallelized and not" do
+ context 'when there are dependencies which are both parallelized and not' do
let(:config) do
{
job_name => job_config,
@@ -141,21 +86,157 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
needs: {
job: [
{ name: job_name.to_s, extra: :key },
- { name: "other_job", extra: :key }
+ { name: 'other_job', extra: :key }
]
}
}
}
end
- it "parallelizes dependencies" do
+ it 'parallelizes dependencies' do
expect(subject.dig(:final_job, :needs, :job)).to include(*expanded_job_attributes)
end
- it "includes the regular job in dependencies" do
+ it 'includes the regular job in dependencies' do
expect(subject.dig(:final_job, :needs, :job)).to include(name: 'other_job', extra: :key)
end
end
end
+
+ context 'with parallel config as integer' do
+ let(:variables_config) { {} }
+ let(:parallel_config) { 5 }
+
+ let(:expanded_job_names) do
+ [
+ 'rspec 1/5',
+ 'rspec 2/5',
+ 'rspec 3/5',
+ 'rspec 4/5',
+ 'rspec 5/5'
+ ]
+ end
+
+ it 'does not have original job' do
+ is_expected.not_to include(job_name)
+ end
+
+ it 'has parallelized jobs' do
+ is_expected.to include(*expanded_job_names.map(&:to_sym))
+ end
+
+ it 'sets job instance in options' do
+ expect(subject.values).to all(include(:instance))
+ end
+
+ it 'parallelizes jobs with original config' do
+ original_config = config[job_name]
+ .except(:name)
+ .deep_merge(parallel: { total: parallel_config })
+
+ configs = subject.values.map { |config| config.except(:name, :instance) }
+
+ expect(configs).to all(eq(original_config))
+ end
+
+ context 'when the job is not parallelized' do
+ let(:job_config) { { script: 'rspec', name: 'rspec' } }
+
+ it 'returns the same hash' do
+ is_expected.to eq(config)
+ end
+ end
+
+ context 'when there is a job with a slash in it' do
+ let(:job_name) { :"rspec 35/2" }
+
+ it 'properly parallelizes job names' do
+ job_names = [
+ :"rspec 35/2 1/5",
+ :"rspec 35/2 2/5",
+ :"rspec 35/2 3/5",
+ :"rspec 35/2 4/5",
+ :"rspec 35/2 5/5"
+ ]
+
+ is_expected.to include(*job_names)
+ end
+ end
+
+ it_behaves_like 'parallel dependencies'
+ it_behaves_like 'parallel needs'
+ end
+
+ context 'with parallel matrix config' do
+ let(:variables_config) do
+ {
+ USER_VARIABLE: 'user value'
+ }
+ end
+
+ let(:parallel_config) do
+ {
+ matrix: [
+ {
+ VAR_1: [1],
+ VAR_2: [2, 3]
+ }
+ ]
+ }
+ end
+
+ let(:expanded_job_names) do
+ [
+ 'rspec 1/2',
+ 'rspec 2/2'
+ ]
+ end
+
+ it 'does not have original job' do
+ is_expected.not_to include(job_name)
+ end
+
+ it 'has parallelized jobs' do
+ is_expected.to include(*expanded_job_names.map(&:to_sym))
+ end
+
+ it 'sets job instance in options' do
+ expect(subject.values).to all(include(:instance))
+ end
+
+ it 'sets job variables', :aggregate_failures do
+ expect(subject.values[0]).to match(
+ a_hash_including(variables: { VAR_1: 1, VAR_2: 2, USER_VARIABLE: 'user value' })
+ )
+
+ expect(subject.values[1]).to match(
+ a_hash_including(variables: { VAR_1: 1, VAR_2: 3, USER_VARIABLE: 'user value' })
+ )
+ end
+
+ it 'parallelizes jobs with original config' do
+ configs = subject.values.map do |config|
+ config.except(:name, :instance, :variables)
+ end
+
+ original_config = config[job_name]
+ .except(:name, :variables)
+ .deep_merge(parallel: { total: 2 })
+
+ expect(configs).to all(match(a_hash_including(original_config)))
+ end
+
+ it_behaves_like 'parallel dependencies'
+ it_behaves_like 'parallel needs'
+ end
+
+ context 'when parallel config does not match a factory' do
+ let(:variables_config) { {} }
+ let(:parallel_config) { }
+
+ it 'does not alter the job config' do
+ is_expected.to match(config)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
index 08a3fbd7867..45e87466532 100644
--- a/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/coverage/cobertura_spec.rb
@@ -19,6 +19,41 @@ RSpec.describe Gitlab::Ci::Parsers::Coverage::Cobertura do
end
end
+ context 'when there is a <sources>' do
+ shared_examples_for 'ignoring sources' do
+ it 'parses XML without errors' do
+ expect { subject }.not_to raise_error
+
+ expect(coverage_report.files).to eq({})
+ end
+ end
+
+ context 'and has a single source' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <sources>
+ <source>project/src</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources'
+ end
+
+ context 'and has multiple sources' do
+ let(:cobertura) do
+ <<-EOF.strip_heredoc
+ <sources>
+ <source>project/src/foo</source>
+ <source>project/src/bar</source>
+ </sources>
+ EOF
+ end
+
+ it_behaves_like 'ignoring sources'
+ end
+ end
+
context 'when there is a single <class>' do
context 'with no lines' do
let(:cobertura) do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
index 5d20b1b8fda..cc4aaffb0a4 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/sequence_spec.rb
@@ -23,9 +23,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
end
it 'does not process the second step' do
- subject.build! do |pipeline, sequence|
- expect(sequence).not_to be_complete
- end
+ subject.build!
expect(second_step).not_to have_received(:perform!)
end
@@ -43,9 +41,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Sequence do
end
it 'iterates through entire sequence' do
- subject.build! do |pipeline, sequence|
- expect(sequence).to be_complete
- end
+ subject.build!
expect(first_step).to have_received(:perform!)
expect(second_step).to have_received(:perform!)
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index 931c62701ce..de580d2e148 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -41,9 +41,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
)
end
+ let(:save_incompleted) { true }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
- project: project, current_user: user, config_processor: yaml_processor
+ project: project, current_user: user, config_processor: yaml_processor, save_incompleted: save_incompleted
)
end
@@ -84,6 +85,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
perform!
expect(pipeline.status).to eq('failed')
+ expect(pipeline).to be_persisted
expect(pipeline.errors.to_a).to include('External validation failed')
end
@@ -98,6 +100,30 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
perform!
end
+
+ context 'when save_incompleted is false' do
+ let(:save_incompleted) { false }
+
+ it 'adds errors to the pipeline without dropping it' do
+ perform!
+
+ expect(pipeline.status).to eq('pending')
+ expect(pipeline).not_to be_persisted
+ expect(pipeline.errors.to_a).to include('External validation failed')
+ end
+
+ it 'breaks the chain' do
+ perform!
+
+ expect(step.break?).to be true
+ end
+
+ it 'logs that the pipeline was not authorized' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: 'Pipeline not authorized', project_id: project.id, user_id: user.id)
+
+ perform!
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
index 1dc2e0a1822..7eefb4d7876 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/repository_spec.rb
@@ -68,6 +68,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::Repository do
proj.repository.add_tag(user, 'master', 'master')
end
end
+
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
project: project, current_user: user, origin_ref: 'master')
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/and_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/and_spec.rb
index 6601537a2d3..1448b045b18 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/and_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/and_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::And do
describe '.type' do
it 'is an operator' do
- expect(described_class.type).to eq :operator
+ expect(described_class.type).to eq :logical_operator
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb
index 2bed47f0a87..ab223ae41fa 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/equals_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Equals do
describe '.type' do
it 'is an operator' do
- expect(described_class.type).to eq :operator
+ expect(described_class.type).to eq :logical_operator
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
index efcea0b0e09..0da04d8dcf7 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Matches do
describe '.type' do
it 'is an operator' do
- expect(described_class.type).to eq :operator
+ expect(described_class.type).to eq :logical_operator
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_equals_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_equals_spec.rb
index a81e1713ef0..3cde4c5d9dc 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_equals_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_equals_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::NotEquals do
describe '.type' do
it 'is an operator' do
- expect(described_class.type).to eq :operator
+ expect(described_class.type).to eq :logical_operator
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb
index f44fe19f86d..9bff2355d58 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::NotMatches do
describe '.type' do
it 'is an operator' do
- expect(described_class.type).to eq :operator
+ expect(described_class.type).to eq :logical_operator
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/or_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/or_spec.rb
index 7fe445975eb..c7d89c4e1e9 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/or_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/or_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Or do
describe '.type' do
it 'is an operator' do
- expect(described_class.type).to eq :operator
+ expect(described_class.type).to eq :logical_operator
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
index 1a56a91c471..fa4f8a20984 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/pattern_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Pattern do
.to eq Gitlab::UntrustedRegexp.new('pattern')
end
- it 'is a eager scanner for regexp boundaries' do
+ it 'is an eager scanner for regexp boundaries' do
scanner = StringScanner.new('/some .* / pattern/')
token = described_class.scan(scanner)
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
index 61c6ced4dac..6e242faa885 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
@@ -81,6 +81,35 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexer do
with_them do
it { is_expected.to eq(tokens) }
end
+
+ context 'when parentheses are used' do
+ where(:expression, :tokens) do
+ '($PRESENT_VARIABLE =~ /my var/) && $EMPTY_VARIABLE =~ /nope/' | ['(', '$PRESENT_VARIABLE', '=~', '/my var/', ')', '&&', '$EMPTY_VARIABLE', '=~', '/nope/']
+ '$PRESENT_VARIABLE =~ /my var/ || ($EMPTY_VARIABLE =~ /nope/)' | ['$PRESENT_VARIABLE', '=~', '/my var/', '||', '(', '$EMPTY_VARIABLE', '=~', '/nope/', ')']
+ '($PRESENT_VARIABLE && (null || $EMPTY_VARIABLE == ""))' | ['(', '$PRESENT_VARIABLE', '&&', '(', 'null', '||', '$EMPTY_VARIABLE', '==', '""', ')', ')']
+ end
+
+ with_them do
+ context 'when ci_if_parenthesis_enabled is enabled' do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: true)
+ end
+
+ it { is_expected.to eq(tokens) }
+ end
+
+ context 'when ci_if_parenthesis_enabled is disabled' do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: false)
+ end
+
+ it do
+ expect { subject }
+ .to raise_error described_class::SyntaxError
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
index 1704cabfd2e..3394a75ac0a 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/parser_spec.rb
@@ -1,51 +1,79 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Expression::Parser do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: true)
+ end
+
describe '#tree' do
- context 'when using two operators' do
- it 'returns a reverse descent parse tree' do
- expect(described_class.seed('$VAR1 == "123"').tree)
- .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Equals
+ context 'validates simple operators' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:expression, :result_tree) do
+ '$VAR1 == "123"' | 'equals($VAR1, "123")'
+ '$VAR1 == "123" == $VAR2' | 'equals(equals($VAR1, "123"), $VAR2)'
+ '$VAR' | '$VAR'
+ '"some value"' | '"some value"'
+ 'null' | 'null'
+ '$VAR1 || $VAR2 && $VAR3' | 'or($VAR1, and($VAR2, $VAR3))'
+ '$VAR1 && $VAR2 || $VAR3' | 'or(and($VAR1, $VAR2), $VAR3)'
+ '$VAR1 && $VAR2 || $VAR3 && $VAR4' | 'or(and($VAR1, $VAR2), and($VAR3, $VAR4))'
+ '$VAR1 && ($VAR2 || $VAR3) && $VAR4' | 'and(and($VAR1, or($VAR2, $VAR3)), $VAR4)'
end
- end
- context 'when using three operators' do
- it 'returns a reverse descent parse tree' do
- expect(described_class.seed('$VAR1 == "123" == $VAR2').tree)
- .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Equals
+ with_them do
+ it { expect(described_class.seed(expression).tree.inspect).to eq(result_tree) }
end
end
- context 'when using a single variable token' do
- it 'returns a single token instance' do
- expect(described_class.seed('$VAR').tree)
- .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Variable
+ context 'when combining && and OR operators' do
+ subject { described_class.seed('$VAR1 == "a" || $VAR2 == "b" && $VAR3 == "c" || $VAR4 == "d" && $VAR5 == "e"').tree }
+
+ context 'when parenthesis engine is enabled' do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: true)
+ end
+
+ it 'returns operations in a correct order' do
+ expect(subject.inspect)
+ .to eq('or(or(equals($VAR1, "a"), and(equals($VAR2, "b"), equals($VAR3, "c"))), and(equals($VAR4, "d"), equals($VAR5, "e")))')
+ end
+ end
+
+ context 'when parenthesis engine is disabled (legacy)' do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: false)
+ end
+
+ it 'returns operations in an invalid order' do
+ expect(subject.inspect)
+ .to eq('or(equals($VAR1, "a"), and(equals($VAR2, "b"), or(equals($VAR3, "c"), and(equals($VAR4, "d"), equals($VAR5, "e")))))')
+ end
end
end
- context 'when using a single string token' do
- it 'returns a single token instance' do
- expect(described_class.seed('"some value"').tree)
- .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::String
+ context 'when using parenthesis' do
+ subject { described_class.seed('(($VAR1 == "a" || $VAR2 == "b") && $VAR3 == "c" || $VAR4 == "d") && $VAR5 == "e"').tree }
+
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: true)
+ end
+
+ it 'returns operations in a correct order' do
+ expect(subject.inspect)
+ .to eq('and(or(and(or(equals($VAR1, "a"), equals($VAR2, "b")), equals($VAR3, "c")), equals($VAR4, "d")), equals($VAR5, "e"))')
end
end
context 'when expression is empty' do
- it 'returns a null token' do
+ it 'raises a parsing error' do
expect { described_class.seed('').tree }
.to raise_error Gitlab::Ci::Pipeline::Expression::Parser::ParseError
end
end
- context 'when expression is null' do
- it 'returns a null token' do
- expect(described_class.seed('null').tree)
- .to be_a Gitlab::Ci::Pipeline::Expression::Lexeme::Null
- end
- end
-
context 'when two value tokens have no operator' do
it 'raises a parsing error' do
expect { described_class.seed('$VAR "text"').tree }
@@ -66,5 +94,42 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Parser do
.to raise_error Gitlab::Ci::Pipeline::Expression::Lexeme::Operator::OperatorError
end
end
+
+ context 'when parentheses are unmatched' do
+ context 'when parenthesis engine is enabled' do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: true)
+ end
+
+ where(:expression) do
+ [
+ '$VAR == (',
+ '$VAR2 == ("aa"',
+ '$VAR2 == ("aa"))',
+ '$VAR2 == "aa")',
+ '(($VAR2 == "aa")',
+ '($VAR2 == "aa"))'
+ ]
+ end
+
+ with_them do
+ it 'raises a ParseError' do
+ expect { described_class.seed(expression).tree }
+ .to raise_error Gitlab::Ci::Pipeline::Expression::Parser::ParseError
+ end
+ end
+ end
+
+ context 'when parenthesis engine is disabled' do
+ before do
+ stub_feature_flags(ci_if_parenthesis_enabled: false)
+ end
+
+ it 'raises a SyntaxError' do
+ expect { described_class.seed('$VAR == (').tree }
+ .to raise_error Gitlab::Ci::Pipeline::Expression::Lexer::SyntaxError
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
index 642d6816030..cf3644c9ad5 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'rspec-parameterized'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Expression::Statement do
subject do
@@ -109,6 +108,17 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Statement do
'$UNDEFINED_VARIABLE || $PRESENT_VARIABLE' | 'my variable'
'$UNDEFINED_VARIABLE == null || $PRESENT_VARIABLE' | true
'$PRESENT_VARIABLE || $UNDEFINED_VARIABLE == null' | 'my variable'
+
+ '($PRESENT_VARIABLE)' | 'my variable'
+ '(($PRESENT_VARIABLE))' | 'my variable'
+ '(($PRESENT_VARIABLE && null) || $EMPTY_VARIABLE == "")' | true
+ '($PRESENT_VARIABLE) && (null || $EMPTY_VARIABLE == "")' | true
+ '("string" || "test") == "string"' | true
+ '(null || ("test" == "string"))' | false
+ '("string" == ("test" && "string"))' | true
+ '("string" == ("test" || "string"))' | false
+ '("string" == "test" || "string")' | "string"
+ '("string" == ("string" || (("1" == "1") && ("2" == "3"))))' | true
end
with_them do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 2dea554fe56..733ab30132d 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -928,29 +928,51 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
end
- context 'when lower limit of needs is reached' do
- before do
- stub_feature_flags(ci_dag_limit_needs: true)
- end
+ context 'when using 101 needs' do
+ let(:needs_count) { 101 }
- let(:needs_count) { described_class::LOW_NEEDS_LIMIT + 1 }
+ context 'when ci_plan_needs_size_limit is disabled' do
+ before do
+ stub_feature_flags(ci_plan_needs_size_limit: false)
+ end
- it "returns an error" do
- expect(subject.errors).to contain_exactly(
- "rspec: one job can only need 10 others, but you have listed 11. See needs keyword documentation for more details")
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ "rspec: one job can only need 10 others, but you have listed 101. See needs keyword documentation for more details")
+ end
end
- end
- context 'when upper limit of needs is reached' do
- before do
- stub_feature_flags(ci_dag_limit_needs: false)
- end
+ context 'when ci_plan_needs_size_limit is enabled' do
+ before do
+ stub_feature_flags(ci_plan_needs_size_limit: true)
+ end
- let(:needs_count) { described_class::HARD_NEEDS_LIMIT + 1 }
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ "rspec: one job can only need 50 others, but you have listed 101. See needs keyword documentation for more details")
+ end
- it "returns an error" do
- expect(subject.errors).to contain_exactly(
- "rspec: one job can only need 50 others, but you have listed 51. See needs keyword documentation for more details")
+ context 'when ci_needs_size_limit is set to 100' do
+ before do
+ project.actual_limits.update!(ci_needs_size_limit: 100)
+ end
+
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ "rspec: one job can only need 100 others, but you have listed 101. See needs keyword documentation for more details")
+ end
+ end
+
+ context 'when ci_needs_size_limit is set to 0' do
+ before do
+ project.actual_limits.update!(ci_needs_size_limit: 0)
+ end
+
+ it "returns an error" do
+ expect(subject.errors).to contain_exactly(
+ "rspec: one job can only need 0 others, but you have listed 101. See needs keyword documentation for more details")
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb
index 240ede790e0..650ae41320b 100644
--- a/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/accessibility_reports_comparer_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Gitlab::Ci::Reports::AccessibilityReportsComparer do
}
]
end
+
let(:different_error) do
[
{
diff --git a/spec/lib/gitlab/ci/reports/test_report_summary_spec.rb b/spec/lib/gitlab/ci/reports/test_report_summary_spec.rb
index 70d82851125..555682cc006 100644
--- a/spec/lib/gitlab/ci/reports/test_report_summary_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_report_summary_spec.rb
@@ -11,68 +11,8 @@ RSpec.describe Gitlab::Ci::Reports::TestReportSummary do
subject { test_report_summary.total }
context 'when test report summary has several build report results' do
- it 'returns test suite summary object' do
- expect(subject).to be_a_kind_of(Gitlab::Ci::Reports::TestSuiteSummary)
- end
- end
- end
-
- describe '#total_time' do
- subject { test_report_summary.total_time }
-
- context 'when test report summary has several build report results' do
- it 'returns the total' do
- expect(subject).to eq(0.84)
- end
- end
- end
-
- describe '#total_count' do
- subject { test_report_summary.total_count }
-
- context 'when test report summary has several build report results' do
- it 'returns the total count' do
- expect(subject).to eq(4)
- end
- end
- end
-
- describe '#success_count' do
- subject { test_report_summary.success_count }
-
- context 'when test suite summary has several build report results' do
- it 'returns the total success' do
- expect(subject).to eq(2)
- end
- end
- end
-
- describe '#failed_count' do
- subject { test_report_summary.failed_count }
-
- context 'when test suite summary has several build report results' do
- it 'returns the total failed' do
- expect(subject).to eq(0)
- end
- end
- end
-
- describe '#error_count' do
- subject { test_report_summary.error_count }
-
- context 'when test suite summary has several build report results' do
- it 'returns the total errored' do
- expect(subject).to eq(2)
- end
- end
- end
-
- describe '#skipped_count' do
- subject { test_report_summary.skipped_count }
-
- context 'when test suite summary has several build report results' do
- it 'returns the total skipped' do
- expect(subject).to eq(0)
+      it 'returns all the totals in a hash' do
+ expect(subject).to include(:time, :count, :success, :failed, :skipped, :error)
end
end
end
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index c4c4d2c3704..fbe3473f6b0 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -50,9 +50,11 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
before do
test_suite.add_test_case(test_case_success)
test_suite.add_test_case(test_case_failed)
+ test_suite.add_test_case(test_case_skipped)
+ test_suite.add_test_case(test_case_error)
end
- it { is_expected.to eq(2) }
+ it { is_expected.to eq(4) }
end
describe '#total_status' do
diff --git a/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb
index 12c96acdcf3..a98d3db4e82 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_summary_spec.rb
@@ -86,4 +86,14 @@ RSpec.describe Gitlab::Ci::Reports::TestSuiteSummary do
end
end
end
+
+ describe '#to_h' do
+ subject { test_suite_summary.to_h }
+
+ context 'when test suite summary has several build report results' do
+ it 'returns the total as a hash' do
+ expect(subject).to include(:time, :count, :success, :failed, :skipped, :error)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/runner_instructions_spec.rb b/spec/lib/gitlab/ci/runner_instructions_spec.rb
new file mode 100644
index 00000000000..32ee2ceb040
--- /dev/null
+++ b/spec/lib/gitlab/ci/runner_instructions_spec.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::RunnerInstructions do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:params) { {} }
+ let(:user) { create(:user) }
+
+ describe 'OS' do
+ Gitlab::Ci::RunnerInstructions::OS.each do |name, subject|
+ context name do
+ it 'has the required fields' do
+ expect(subject).to have_key(:human_readable_name)
+ expect(subject).to have_key(:download_locations)
+ expect(subject).to have_key(:install_script_template_path)
+ expect(subject).to have_key(:runner_executable)
+ end
+
+        it 'has a non-empty install script template' do
+ expect(File.read(subject[:install_script_template_path]).length).not_to eq(0)
+ end
+ end
+ end
+ end
+
+ describe 'OTHER_ENVIRONMENTS' do
+ Gitlab::Ci::RunnerInstructions::OTHER_ENVIRONMENTS.each do |name, subject|
+ context name do
+ it 'has the required fields' do
+ expect(subject).to have_key(:human_readable_name)
+ expect(subject).to have_key(:installation_instructions_url)
+ end
+ end
+ end
+ end
+
+ describe '#install_script' do
+ subject { described_class.new(current_user: user, **params) }
+
+ context 'invalid params' do
+ where(:current_params, :expected_error_message) do
+ { os: nil, arch: nil } | 'Missing OS'
+ { os: 'linux', arch: nil } | 'Missing arch'
+ { os: nil, arch: 'amd64' } | 'Missing OS'
+ { os: 'non_existing_os', arch: 'amd64' } | 'Invalid OS'
+ { os: 'linux', arch: 'non_existing_arch' } | 'Architecture not found for OS'
+ { os: 'windows', arch: 'non_existing_arch' } | 'Architecture not found for OS'
+ end
+
+ with_them do
+ let(:params) { current_params }
+
+        it 'returns an error' do
+ result = subject.install_script
+
+ expect(result).to be_nil
+ expect(subject.errors).to include(expected_error_message)
+ end
+ end
+ end
+
+ context 'with valid params' do
+ where(:os, :arch) do
+ 'linux' | 'amd64'
+ 'linux' | '386'
+ 'linux' | 'arm'
+ 'linux' | 'arm64'
+ 'windows' | 'amd64'
+ 'windows' | '386'
+ 'osx' | 'amd64'
+ end
+
+ with_them do
+ let(:params) { { os: os, arch: arch } }
+
+        it 'returns a string containing the correct params' do
+ result = subject.install_script
+
+ expect(result).to be_a(String)
+
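+          # macOS ("osx") binaries are referred to by the "darwin" platform name in the install script.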
+ if os == 'osx'
+ expect(result).to include("darwin-#{arch}")
+ else
+ expect(result).to include("#{os}-#{arch}")
+ end
+ end
+ end
+ end
+ end
+
+ describe '#register_command' do
+ let(:params) { { os: 'linux', arch: 'foo' } }
+
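+    # Builds one where() entry per supported OS, mapping the OS name to its runner executable.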
+ where(:commands) do
+ Gitlab::Ci::RunnerInstructions::OS.map do |name, values|
+ { name => values[:runner_executable] }
+ end
+ end
+
+ context 'group' do
+ let(:group) { create(:group) }
+
+ subject { described_class.new(current_user: user, group: group, **params) }
+
+ context 'user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ with_them do
+ let(:params) { { os: commands.each_key.first, arch: 'foo' } }
+
+          it 'has correct configurations' do
+ result = subject.register_command
+
+ expect(result).to include("#{commands[commands.each_key.first]} register")
+ expect(result).to include("--registration-token #{group.runners_token}")
+ expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
+ end
+ end
+ end
+
+ context 'user is not owner' do
+ where(:user_permission) do
+ [:maintainer, :developer, :reporter, :guest]
+ end
+
+ with_them do
+ before do
+ create(:group_member, user_permission, group: group, user: user)
+ end
+
+          it 'returns an access denied error' do
+ result = subject.register_command
+
+ expect(result).to be_nil
+ expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
+ end
+ end
+ end
+ end
+
+ context 'project' do
+ let(:project) { create(:project) }
+
+ subject { described_class.new(current_user: user, project: project, **params) }
+
+ context 'user is maintainer' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ with_them do
+ let(:params) { { os: commands.each_key.first, arch: 'foo' } }
+
+          it 'has correct configurations' do
+ result = subject.register_command
+
+ expect(result).to include("#{commands[commands.each_key.first]} register")
+ expect(result).to include("--registration-token #{project.runners_token}")
+ expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
+ end
+ end
+ end
+
+ context 'user is not maintainer' do
+ where(:user_permission) do
+ [:developer, :reporter, :guest]
+ end
+
+ with_them do
+ before do
+ create(:project_member, user_permission, project: project, user: user)
+ end
+
+          it 'returns an access denied error' do
+ result = subject.register_command
+
+ expect(result).to be_nil
+ expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
+ end
+ end
+ end
+ end
+
+ context 'instance' do
+ subject { described_class.new(current_user: user, **params) }
+
+ context 'user is admin' do
+ let(:user) { create(:user, :admin) }
+
+ with_them do
+ let(:params) { { os: commands.each_key.first, arch: 'foo' } }
+
+          it 'has correct configurations' do
+ result = subject.register_command
+
+ expect(result).to include("#{commands[commands.each_key.first]} register")
+ expect(result).to include("--registration-token #{Gitlab::CurrentSettings.runners_registration_token}")
+ expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
+ end
+ end
+ end
+
+ context 'user is not admin' do
+        it 'returns an access denied error' do
+ result = subject.register_command
+
+ expect(result).to be_nil
+ expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index 47bbc4169b6..e1dcd05373f 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -16,48 +16,61 @@ RSpec.describe Gitlab::Ci::Status::Composite do
end
describe '#status' do
- shared_examples 'compares composite with SQL status' do
- it 'returns exactly the same result' do
- builds = Ci::Build.where(id: all_statuses)
+ using RSpec::Parameterized::TableSyntax
- expect(composite_status.status).to eq(builds.legacy_status)
- expect(composite_status.warnings?).to eq(builds.failed_but_allowed.any?)
+ shared_examples 'compares status and warnings' do
+ let(:composite_status) do
+ described_class.new(all_statuses)
+ end
+
+ it 'returns status and warnings?' do
+ expect(composite_status.status).to eq(result)
+ expect(composite_status.warnings?).to eq(has_warnings)
end
end
- shared_examples 'validate all combinations' do |perms|
- Ci::HasStatus::STATUSES_ENUM.keys.combination(perms).each do |statuses|
- context "with #{statuses.join(",")}" do
- it_behaves_like 'compares composite with SQL status' do
- let(:all_statuses) do
- statuses.map { |status| @statuses[status] }
- end
-
- let(:composite_status) do
- described_class.new(all_statuses)
- end
- end
-
- Ci::HasStatus::STATUSES_ENUM.each do |allow_failure_status, _|
- context "and allow_failure #{allow_failure_status}" do
- it_behaves_like 'compares composite with SQL status' do
- let(:all_statuses) do
- statuses.map { |status| @statuses[status] } +
- [@statuses_with_allow_failure[allow_failure_status]]
- end
-
- let(:composite_status) do
- described_class.new(all_statuses)
- end
- end
- end
- end
+ context 'allow_failure: false' do
+ where(:build_statuses, :result, :has_warnings) do
+ %i(skipped) | 'skipped' | false
+ %i(skipped success) | 'success' | false
+ %i(created) | 'created' | false
+ %i(preparing) | 'preparing' | false
+ %i(canceled success skipped) | 'canceled' | false
+ %i(pending created skipped) | 'pending' | false
+ %i(pending created skipped success) | 'running' | false
+ %i(running created skipped success) | 'running' | false
+ %i(success waiting_for_resource) | 'waiting_for_resource' | false
+ %i(success manual) | 'manual' | false
+ %i(success scheduled) | 'scheduled' | false
+ %i(created preparing) | 'preparing' | false
+ %i(created success pending) | 'running' | false
+ %i(skipped success failed) | 'failed' | false
+ end
+
+ with_them do
+ let(:all_statuses) do
+ build_statuses.map { |status| @statuses[status] }
end
+
+ it_behaves_like 'compares status and warnings'
end
end
- it_behaves_like 'validate all combinations', 0
- it_behaves_like 'validate all combinations', 1
- it_behaves_like 'validate all combinations', 2
+ context 'allow_failure: true' do
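+      # These statuses map to builds created with allow_failure: true (looked up in @statuses_with_allow_failure).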
+ where(:build_statuses, :result, :has_warnings) do
+ %i(manual) | 'skipped' | false
+ %i(skipped failed) | 'success' | true
+ %i(created failed) | 'created' | true
+ %i(preparing manual) | 'preparing' | false
+ end
+
+ with_them do
+ let(:all_statuses) do
+ build_statuses.map { |status| @statuses_with_allow_failure[status] }
+ end
+
+ it_behaves_like 'compares status and warnings'
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/trace/stream_spec.rb b/spec/lib/gitlab/ci/trace/stream_spec.rb
index 568c10bbac2..e28469c9404 100644
--- a/spec/lib/gitlab/ci/trace/stream_spec.rb
+++ b/spec/lib/gitlab/ci/trace/stream_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
describe '#append' do
shared_examples_for 'appends' do
- it "truncates and append content" do
+ it "truncates and appends content" do
stream.append(+"89", 4)
stream.seek(0)
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 5c6d748d66c..1c81cc83cd1 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -443,15 +443,15 @@ module Gitlab
context 'when a warning is raised in a given entry' do
let(:config) do
<<-EOYML
- rspec:
- script: rspec
- rules:
- - if: '$VAR == "value"'
+ rspec:
+ script: echo
+ rules:
+ - when: always
EOYML
end
it 'is propagated all the way up to the processor' do
- expect(subject.warnings).to contain_exactly('jobs:rspec uses `rules` without defining `workflow:rules`')
+ expect(subject.warnings).to contain_exactly(/jobs:rspec may allow multiple pipelines to run/)
end
end
@@ -461,7 +461,7 @@ module Gitlab
rspec:
script: rspec
rules:
- - if: '$VAR == "value"'
+ - when: always
invalid:
script: echo
artifacts:
@@ -473,7 +473,7 @@ module Gitlab
expect { subject }.to raise_error do |error|
expect(error).to be_a(described_class::ValidationError)
expect(error.message).to eq('jobs:invalid:artifacts config should be a hash')
- expect(error.warnings).to contain_exactly('jobs:rspec uses `rules` without defining `workflow:rules`')
+ expect(error.warnings).to contain_exactly(/jobs:rspec may allow multiple pipelines to run/)
end
end
end
@@ -485,7 +485,7 @@ module Gitlab
rspec:
script: rspec
rules:
- - if: '$VAR == "value"'
+ - when: always
EOYML
end
@@ -516,7 +516,7 @@ module Gitlab
stage: custom_stage
script: rspec
rules:
- - if: '$VAR == "value"'
+ - when: always
EOYML
end
@@ -530,7 +530,7 @@ module Gitlab
stage: build
script: echo
rules:
- - if: '$VAR == "value"'
+ - when: always
test:
stage: test
script: echo
@@ -549,7 +549,7 @@ module Gitlab
script: echo
needs: [test]
rules:
- - if: '$VAR == "value"'
+ - when: always
test:
stage: test
script: echo
@@ -571,7 +571,7 @@ module Gitlab
rspec:
script: rspec
rules:
- - if: '$VAR == "value"'
+ - when: always
EOYML
end
@@ -942,6 +942,7 @@ module Gitlab
let(:variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
end
+
let(:config) do
{
variables: variables,
@@ -962,9 +963,11 @@ module Gitlab
let(:global_variables) do
{ 'VAR1' => 'global1', 'VAR3' => 'global3', 'VAR4' => 'global4' }
end
+
let(:job_variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
end
+
let(:config) do
{
before_script: ['pwd'],
@@ -1269,27 +1272,104 @@ module Gitlab
end
describe 'Parallel' do
+ let(:config) do
+ YAML.dump(rspec: { script: 'rspec',
+ parallel: parallel,
+ variables: { 'VAR1' => 1 } })
+ end
+
+ let(:config_processor) { Gitlab::Ci::YamlProcessor.new(config) }
+ let(:builds) { config_processor.stage_builds_attributes('test') }
+
context 'when job is parallelized' do
let(:parallel) { 5 }
- let(:config) do
- YAML.dump(rspec: { script: 'rspec',
- parallel: parallel })
- end
-
it 'returns parallelized jobs' do
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.stage_builds_attributes('test')
build_options = builds.map { |build| build[:options] }
expect(builds.size).to eq(5)
- expect(build_options).to all(include(:instance, parallel: parallel))
+ expect(build_options).to all(include(:instance, parallel: { number: parallel, total: parallel }))
end
it 'does not have the original job' do
- config_processor = Gitlab::Ci::YamlProcessor.new(config)
- builds = config_processor.stage_builds_attributes('test')
+ expect(builds).not_to include(:rspec)
+ end
+ end
+
+ context 'with build matrix' do
+ let(:parallel) do
+ {
+ matrix: [
+ { 'PROVIDER' => 'aws', 'STACK' => %w[monitoring app1 app2] },
+ { 'PROVIDER' => 'ovh', 'STACK' => %w[monitoring backup app] },
+ { 'PROVIDER' => 'gcp', 'STACK' => %w[data processing] }
+ ]
+ }
+ end
+
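+      # The matrix above expands to 3 + 3 + 2 = 8 PROVIDER/STACK combinations.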
+ it 'returns the number of parallelized jobs' do
+ expect(builds.size).to eq(8)
+ end
+
+ it 'returns the parallel config' do
+ build_options = builds.map { |build| build[:options] }
+ parallel_config = {
+ matrix: parallel[:matrix].map { |var| var.transform_values { |v| Array(v).flatten }},
+ total: build_options.size
+ }
+
+ expect(build_options).to all(include(:instance, parallel: parallel_config))
+ end
+
+      it 'sets matrix variables' do
+ build_variables = builds.map { |build| build[:yaml_variables] }
+ expected_variables = [
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'aws' },
+ { key: 'STACK', value: 'monitoring' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'aws' },
+ { key: 'STACK', value: 'app1' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'aws' },
+ { key: 'STACK', value: 'app2' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'ovh' },
+ { key: 'STACK', value: 'monitoring' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'ovh' },
+ { key: 'STACK', value: 'backup' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'ovh' },
+ { key: 'STACK', value: 'app' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'gcp' },
+ { key: 'STACK', value: 'data' }
+ ],
+ [
+ { key: 'VAR1', value: '1' },
+ { key: 'PROVIDER', value: 'gcp' },
+ { key: 'STACK', value: 'processing' }
+ ]
+ ].map { |vars| vars.map { |var| a_hash_including(var) } }
+
+ expect(build_variables).to match(expected_variables)
+ end
+
+ it 'does not have the original job' do
expect(builds).not_to include(:rspec)
end
end
@@ -1482,6 +1562,21 @@ module Gitlab
})
end
+ it "returns artifacts with expire_in never keyword" do
+ config = YAML.dump({
+ rspec: {
+ script: "rspec",
+ artifacts: { paths: ["releases/"], expire_in: "never" }
+ }
+ })
+
+ config_processor = Gitlab::Ci::YamlProcessor.new(config)
+ builds = config_processor.stage_builds_attributes("test")
+
+ expect(builds.size).to eq(1)
+ expect(builds.first[:options][:artifacts][:expire_in]).to eq('never')
+ end
+
%w[on_success on_failure always].each do |when_state|
it "returns artifacts for when #{when_state} defined" do
config = YAML.dump({
@@ -1564,26 +1659,9 @@ module Gitlab
}
end
- context 'with feature flag active' do
- before do
- stub_feature_flags(ci_release_generation: true)
- end
-
- it "returns release info" do
- expect(processor.stage_builds_attributes('release').first[:options])
- .to eq(config[:release].except(:stage, :only))
- end
- end
-
- context 'with feature flag inactive' do
- before do
- stub_feature_flags(ci_release_generation: false)
- end
-
- it 'raises error' do
- expect { processor }.to raise_error(
- 'jobs:release config release features are not enabled: release')
- end
+ it "returns release info" do
+ expect(processor.stage_builds_attributes('release').first[:options])
+ .to eq(config[:release].except(:stage, :only))
end
end
@@ -1998,6 +2076,7 @@ module Gitlab
{ job: "build2" }
]
end
+
let(:dependencies) { %w(build3) }
it { expect { subject }.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, 'jobs:test1 dependencies the build3 should be part of needs') }
@@ -2407,6 +2486,14 @@ module Gitlab
end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs config should contain at least one visible job")
end
+ it "returns errors if the job script is not defined" do
+ config = YAML.dump({ rspec: { before_script: "test" } })
+
+ expect do
+ Gitlab::Ci::YamlProcessor.new(config)
+ end.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError, "jobs:rspec script can't be blank")
+ end
+
it "returns errors if there are no visible jobs defined" do
config = YAML.dump({ before_script: ["bundle update"], '.hidden'.to_sym => { script: 'ls' } })
expect do
@@ -2619,6 +2706,14 @@ module Gitlab
.to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
'rspec: unknown keys in `extends` (something)')
end
+
+ it 'returns errors if parallel is invalid' do
+ config = YAML.dump({ rspec: { parallel: 'test', script: 'test' } })
+
+ expect { Gitlab::Ci::YamlProcessor.new(config) }
+ .to raise_error(Gitlab::Ci::YamlProcessor::ValidationError,
+ 'jobs:rspec:parallel should be an integer or a hash')
+ end
end
describe "#validation_message" do
diff --git a/spec/lib/gitlab/cleanup/remote_uploads_spec.rb b/spec/lib/gitlab/cleanup/remote_uploads_spec.rb
index 1752608f844..c59b7f004dd 100644
--- a/spec/lib/gitlab/cleanup/remote_uploads_spec.rb
+++ b/spec/lib/gitlab/cleanup/remote_uploads_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::Cleanup::RemoteUploads do
create(:upload, path: 'dir/file2', store: ObjectStorage::Store::LOCAL)
]
end
+
let(:remote_files) do
[
double(key: 'dir/file1'),
diff --git a/spec/lib/gitlab/cross_project_access/class_methods_spec.rb b/spec/lib/gitlab/cross_project_access/class_methods_spec.rb
index cc2c431fc07..afc45c86362 100644
--- a/spec/lib/gitlab/cross_project_access/class_methods_spec.rb
+++ b/spec/lib/gitlab/cross_project_access/class_methods_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::CrossProjectAccess::ClassMethods do
extend Gitlab::CrossProjectAccess::ClassMethods
end
end
+
let(:dummy_proc) { lambda { false } }
describe '#requires_cross_project_access' do
diff --git a/spec/lib/gitlab/danger/changelog_spec.rb b/spec/lib/gitlab/danger/changelog_spec.rb
index f5954cd8c1e..3c67e9ca8ea 100644
--- a/spec/lib/gitlab/danger/changelog_spec.rb
+++ b/spec/lib/gitlab/danger/changelog_spec.rb
@@ -57,6 +57,24 @@ RSpec.describe Gitlab::Danger::Changelog do
is_expected.to be_truthy
end
end
+
+    context 'when MR contains a category that requires a changelog and a category that requires no changelog, with a changelog label' do
+ let(:changes_by_category) { { category_with_changelog => nil, category_without_changelog => nil } }
+ let(:mr_labels) { ['feature'] }
+
+ it 'is truthy' do
+ is_expected.to be_truthy
+ end
+ end
+
+    context 'when MR contains a category that requires a changelog and a category that requires no changelog, with no changelog label' do
+ let(:changes_by_category) { { category_with_changelog => nil, category_without_changelog => nil } }
+ let(:mr_labels) { ['tooling'] }
+
+      it 'is falsey' do
+ is_expected.to be_falsey
+ end
+ end
end
describe '#found' do
diff --git a/spec/lib/gitlab/danger/commit_linter_spec.rb b/spec/lib/gitlab/danger/commit_linter_spec.rb
index 06bec6f793d..c31522c538d 100644
--- a/spec/lib/gitlab/danger/commit_linter_spec.rb
+++ b/spec/lib/gitlab/danger/commit_linter_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Gitlab::Danger::CommitLinter do
let(:commit_class) do
Struct.new(:message, :sha, :diff_parent)
end
+
let(:commit_message) { 'A commit message' }
let(:commit_sha) { 'abcd1234' }
let(:commit) { commit_class.new(commit_message, commit_sha, diff_parent) }
diff --git a/spec/lib/gitlab/danger/helper_spec.rb b/spec/lib/gitlab/danger/helper_spec.rb
index e73742b5911..e5018e46634 100644
--- a/spec/lib/gitlab/danger/helper_spec.rb
+++ b/spec/lib/gitlab/danger/helper_spec.rb
@@ -76,6 +76,14 @@ RSpec.describe Gitlab::Danger::Helper do
end
end
+  describe '#changed_files' do
+    it 'returns a list of changed files matching the given regex' do
+ expect(helper).to receive(:all_changed_files).and_return(%w[migration.rb usage_data.rb])
+
+ expect(helper.changed_files(/usage_data/)).to contain_exactly('usage_data.rb')
+ end
+ end
+
describe '#all_ee_changes' do
subject { helper.all_ee_changes }
@@ -98,21 +106,21 @@ RSpec.describe Gitlab::Danger::Helper do
it 'delegates to CHANGELOG-EE.md existence if CI_PROJECT_NAME is set to something else' do
stub_env('CI_PROJECT_NAME', 'something else')
- expect(Dir).to receive(:exist?).with('../../ee') { true }
+ expect(Dir).to receive(:exist?).with(File.expand_path('../../../../ee', __dir__)) { true }
is_expected.to be_truthy
end
it 'returns true if ee exists' do
stub_env('CI_PROJECT_NAME', nil)
- expect(Dir).to receive(:exist?).with('../../ee') { true }
+ expect(Dir).to receive(:exist?).with(File.expand_path('../../../../ee', __dir__)) { true }
is_expected.to be_truthy
end
it "returns false if ee doesn't exist" do
stub_env('CI_PROJECT_NAME', nil)
- expect(Dir).to receive(:exist?).with('../../ee') { false }
+ expect(Dir).to receive(:exist?).with(File.expand_path('../../../../ee', __dir__)) { false }
is_expected.to be_falsy
end
@@ -217,9 +225,17 @@ RSpec.describe Gitlab::Danger::Helper do
'ee/spec/foo' | [:backend]
'ee/spec/foo/bar' | [:backend]
+ 'spec/features/foo' | [:test]
+ 'ee/spec/features/foo' | [:test]
+ 'spec/support/shared_examples/features/foo' | [:test]
+ 'ee/spec/support/shared_examples/features/foo' | [:test]
+ 'spec/support/shared_contexts/features/foo' | [:test]
+ 'ee/spec/support/shared_contexts/features/foo' | [:test]
+ 'spec/support/helpers/features/foo' | [:test]
+ 'ee/spec/support/helpers/features/foo' | [:test]
+
'generator_templates/foo' | [:backend]
'vendor/languages.yml' | [:backend]
- 'vendor/licenses.csv' | [:backend]
'file_hooks/examples/' | [:backend]
'Gemfile' | [:backend]
@@ -242,6 +258,7 @@ RSpec.describe Gitlab::Danger::Helper do
'.editorconfig' | [:engineering_productivity]
'tooling/overcommit/foo' | [:engineering_productivity]
'.codeclimate.yml' | [:engineering_productivity]
+ '.gitlab/CODEOWNERS' | [:engineering_productivity]
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | [:backend]
@@ -295,9 +312,13 @@ RSpec.describe Gitlab::Danger::Helper do
context 'having specific changes' do
it 'has database and backend categories' do
- allow(fake_git).to receive(:diff_for_file).with('usage_data.rb') { double(:diff, patch: "+ count(User.active)") }
+ changed_files = ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb']
- expect(helper.categories_for_file('usage_data.rb')).to eq([:database, :backend])
+ changed_files.each do |file|
+ allow(fake_git).to receive(:diff_for_file).with(file) { double(:diff, patch: "+ count(User.active)") }
+
+ expect(helper.categories_for_file(file)).to eq([:database, :backend])
+ end
end
it 'has backend category' do
@@ -311,6 +332,13 @@ RSpec.describe Gitlab::Danger::Helper do
expect(helper.categories_for_file('user.rb')).to eq([:backend])
end
+
+    it 'has the backend category for files that are not usage_data.rb' do
+ changed_file = 'usage_data/topology.rb'
+ allow(fake_git).to receive(:diff_for_file).with(changed_file) { double(:diff, patch: "+ count(User.active)") }
+
+ expect(helper.categories_for_file(changed_file)).to eq([:backend])
+ end
end
end
diff --git a/spec/lib/gitlab/danger/roulette_spec.rb b/spec/lib/gitlab/danger/roulette_spec.rb
index 676edca2459..b471e17e2e7 100644
--- a/spec/lib/gitlab/danger/roulette_spec.rb
+++ b/spec/lib/gitlab/danger/roulette_spec.rb
@@ -1,6 +1,5 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
require 'webmock/rspec'
require 'timecop'
@@ -11,102 +10,99 @@ RSpec.describe Gitlab::Danger::Roulette do
Timecop.freeze(Time.utc(2020, 06, 22, 10)) { example.run }
end
+ let(:backend_available) { true }
+ let(:backend_tz_offset_hours) { 2.0 }
let(:backend_maintainer) do
- {
- username: 'backend-maintainer',
- name: 'Backend maintainer',
- role: 'Backend engineer',
- projects: { 'gitlab' => 'maintainer backend' },
- available: true,
- tz_offset_hours: 2.0
- }
+ Gitlab::Danger::Teammate.new(
+ 'username' => 'backend-maintainer',
+ 'name' => 'Backend maintainer',
+ 'role' => 'Backend engineer',
+ 'projects' => { 'gitlab' => 'maintainer backend' },
+ 'available' => backend_available,
+ 'tz_offset_hours' => backend_tz_offset_hours
+ )
end
+
let(:frontend_reviewer) do
- {
- username: 'frontend-reviewer',
- name: 'Frontend reviewer',
- role: 'Frontend engineer',
- projects: { 'gitlab' => 'reviewer frontend' },
- available: true,
- tz_offset_hours: 2.0
- }
+ Gitlab::Danger::Teammate.new(
+ 'username' => 'frontend-reviewer',
+ 'name' => 'Frontend reviewer',
+ 'role' => 'Frontend engineer',
+ 'projects' => { 'gitlab' => 'reviewer frontend' },
+ 'available' => true,
+ 'tz_offset_hours' => 2.0
+ )
end
+
let(:frontend_maintainer) do
- {
- username: 'frontend-maintainer',
- name: 'Frontend maintainer',
- role: 'Frontend engineer',
- projects: { 'gitlab' => "maintainer frontend" },
- available: true,
- tz_offset_hours: 2.0
- }
+ Gitlab::Danger::Teammate.new(
+ 'username' => 'frontend-maintainer',
+ 'name' => 'Frontend maintainer',
+ 'role' => 'Frontend engineer',
+ 'projects' => { 'gitlab' => "maintainer frontend" },
+ 'available' => true,
+ 'tz_offset_hours' => 2.0
+ )
end
+
let(:software_engineer_in_test) do
- {
- username: 'software-engineer-in-test',
- name: 'Software Engineer in Test',
- role: 'Software Engineer in Test, Create:Source Code',
- projects: {
- 'gitlab' => 'reviewer qa',
- 'gitlab-qa' => 'maintainer'
- },
- available: true,
- tz_offset_hours: 2.0
- }
+ Gitlab::Danger::Teammate.new(
+ 'username' => 'software-engineer-in-test',
+ 'name' => 'Software Engineer in Test',
+ 'role' => 'Software Engineer in Test, Create:Source Code',
+ 'projects' => { 'gitlab' => 'reviewer qa', 'gitlab-qa' => 'maintainer' },
+ 'available' => true,
+ 'tz_offset_hours' => 2.0
+ )
end
+
let(:engineering_productivity_reviewer) do
- {
- username: 'eng-prod-reviewer',
- name: 'EP engineer',
- role: 'Engineering Productivity',
- projects: { 'gitlab' => 'reviewer backend' },
- available: true,
- tz_offset_hours: 2.0
- }
+ Gitlab::Danger::Teammate.new(
+ 'username' => 'eng-prod-reviewer',
+ 'name' => 'EP engineer',
+ 'role' => 'Engineering Productivity',
+ 'projects' => { 'gitlab' => 'reviewer backend' },
+ 'available' => true,
+ 'tz_offset_hours' => 2.0
+ )
end
let(:teammate_json) do
[
- backend_maintainer,
- frontend_maintainer,
- frontend_reviewer,
- software_engineer_in_test,
- engineering_productivity_reviewer
+ backend_maintainer.to_h,
+ frontend_maintainer.to_h,
+ frontend_reviewer.to_h,
+ software_engineer_in_test.to_h,
+ engineering_productivity_reviewer.to_h
].to_json
end
subject(:roulette) { Object.new.extend(described_class) }
- def matching_teammate(person)
- satisfy do |teammate|
- teammate.username == person[:username] &&
- teammate.name == person[:name] &&
- teammate.role == person[:role] &&
- teammate.projects == person[:projects]
- end
- end
-
- def matching_spin(category, reviewer: { username: nil }, maintainer: { username: nil }, optional: nil)
- satisfy do |spin|
- bool = spin.category == category
- bool &&= spin.reviewer&.username == reviewer[:username]
-
- bool &&=
- if maintainer
- spin.maintainer&.username == maintainer[:username]
- else
- spin.maintainer.nil?
- end
-
- bool && spin.optional_role == optional
+ describe 'Spin#==' do
+ it 'compares Spin attributes' do
+ spin1 = described_class::Spin.new(:backend, frontend_reviewer, frontend_maintainer, false, false)
+ spin2 = described_class::Spin.new(:backend, frontend_reviewer, frontend_maintainer, false, false)
+ spin3 = described_class::Spin.new(:backend, frontend_reviewer, frontend_maintainer, false, true)
+ spin4 = described_class::Spin.new(:backend, frontend_reviewer, frontend_maintainer, true, false)
+ spin5 = described_class::Spin.new(:backend, frontend_reviewer, backend_maintainer, false, false)
+ spin6 = described_class::Spin.new(:backend, backend_maintainer, frontend_maintainer, false, false)
+ spin7 = described_class::Spin.new(:frontend, frontend_reviewer, frontend_maintainer, false, false)
+
+ expect(spin1).to eq(spin2)
+ expect(spin1).not_to eq(spin3)
+ expect(spin1).not_to eq(spin4)
+ expect(spin1).not_to eq(spin5)
+ expect(spin1).not_to eq(spin6)
+ expect(spin1).not_to eq(spin7)
end
end
describe '#spin' do
let!(:project) { 'gitlab' }
- let!(:branch_name) { 'a-branch' }
+ let!(:mr_source_branch) { 'a-branch' }
let!(:mr_labels) { ['backend', 'devops::create'] }
- let!(:author) { Gitlab::Danger::Teammate.new('username' => 'filipa') }
+ let!(:author) { Gitlab::Danger::Teammate.new('username' => 'johndoe') }
let(:timezone_experiment) { false }
let(:spins) do
# Stub the request at the latest time so that we can modify the raw data, e.g. available fields.
@@ -114,12 +110,13 @@ RSpec.describe Gitlab::Danger::Roulette do
.stub_request(:get, described_class::ROULETTE_DATA_URL)
.to_return(body: teammate_json)
- subject.spin(project, categories, branch_name, timezone_experiment: timezone_experiment)
+ subject.spin(project, categories, timezone_experiment: timezone_experiment)
end
before do
- allow(subject).to receive_message_chain(:gitlab, :mr_author).and_return(author.username)
- allow(subject).to receive_message_chain(:gitlab, :mr_labels).and_return(mr_labels)
+ allow(subject).to receive(:mr_author_username).and_return(author.username)
+ allow(subject).to receive(:mr_labels).and_return(mr_labels)
+ allow(subject).to receive(:mr_source_branch).and_return(mr_source_branch)
end
context 'when timezone_experiment == false' do
@@ -127,16 +124,16 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:categories) { [:backend] }
it 'assigns backend reviewer and maintainer' do
- expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ expect(spins[0].reviewer).to eq(engineering_productivity_reviewer)
+ expect(spins[0].maintainer).to eq(backend_maintainer)
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, backend_maintainer, false, false)])
end
context 'when teammate is not available' do
- before do
- backend_maintainer[:available] = false
- end
+ let(:backend_available) { false }
it 'assigns backend reviewer and no maintainer' do
- expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: nil))
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, nil, false, false)])
end
end
end
@@ -145,7 +142,7 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:categories) { [:frontend] }
it 'assigns frontend reviewer and maintainer' do
- expect(spins).to contain_exactly(matching_spin(:frontend, reviewer: frontend_reviewer, maintainer: frontend_maintainer))
+ expect(spins).to eq([described_class::Spin.new(:frontend, frontend_reviewer, frontend_maintainer, false, false)])
end
end
@@ -153,7 +150,7 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:categories) { [:qa] }
it 'assigns QA reviewer' do
- expect(spins).to contain_exactly(matching_spin(:qa, reviewer: software_engineer_in_test))
+ expect(spins).to eq([described_class::Spin.new(:qa, software_engineer_in_test, nil, false, false)])
end
end
@@ -161,7 +158,7 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:categories) { [:engineering_productivity] }
it 'assigns Engineering Productivity reviewer and fallback to backend maintainer' do
- expect(spins).to contain_exactly(matching_spin(:engineering_productivity, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ expect(spins).to eq([described_class::Spin.new(:engineering_productivity, engineering_productivity_reviewer, backend_maintainer, false, false)])
end
end
@@ -169,7 +166,7 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:categories) { [:test] }
it 'assigns corresponding SET' do
- expect(spins).to contain_exactly(matching_spin(:test, reviewer: software_engineer_in_test))
+ expect(spins).to eq([described_class::Spin.new(:test, software_engineer_in_test, nil, :maintainer, false)])
end
end
end
@@ -181,16 +178,14 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:categories) { [:backend] }
it 'assigns backend reviewer and maintainer' do
- expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, backend_maintainer, false, true)])
end
context 'when teammate is not in a good timezone' do
- before do
- backend_maintainer[:tz_offset_hours] = 5.0
- end
+ let(:backend_tz_offset_hours) { 5.0 }
it 'assigns backend reviewer and no maintainer' do
- expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: nil))
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, nil, false, true)])
end
end
end
@@ -203,22 +198,33 @@ RSpec.describe Gitlab::Danger::Roulette do
end
it 'assigns backend reviewer and maintainer' do
- expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, backend_maintainer, false, false)])
end
context 'when teammate is not in a good timezone' do
- before do
- backend_maintainer[:tz_offset_hours] = 5.0
- end
+ let(:backend_tz_offset_hours) { 5.0 }
it 'assigns backend reviewer and maintainer' do
- expect(spins).to contain_exactly(matching_spin(:backend, reviewer: engineering_productivity_reviewer, maintainer: backend_maintainer))
+ expect(spins).to eq([described_class::Spin.new(:backend, engineering_productivity_reviewer, backend_maintainer, false, false)])
end
end
end
end
end
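+  # Checks that each expected teammate appears in the actual collection with the same name, role and projects.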
+ RSpec::Matchers.define :match_teammates do |expected|
+ match do |actual|
+      expected.all? do |expected_person|
+        actual_person_found = actual.find { |actual_person| actual_person.username == expected_person.username }
+
+ actual_person_found &&
+ actual_person_found.name == expected_person.name &&
+ actual_person_found.role == expected_person.role &&
+ actual_person_found.projects == expected_person.projects
+ end
+ end
+ end
+
describe '#team' do
subject(:team) { roulette.team }
@@ -254,15 +260,13 @@ RSpec.describe Gitlab::Danger::Roulette do
end
it 'returns an array of teammates' do
- expected_teammates = [
- matching_teammate(backend_maintainer),
- matching_teammate(frontend_reviewer),
- matching_teammate(frontend_maintainer),
- matching_teammate(software_engineer_in_test),
- matching_teammate(engineering_productivity_reviewer)
- ]
-
- is_expected.to contain_exactly(*expected_teammates)
+ is_expected.to match_teammates([
+ backend_maintainer,
+ frontend_reviewer,
+ frontend_maintainer,
+ software_engineer_in_test,
+ engineering_productivity_reviewer
+ ])
end
it 'memoizes the result' do
@@ -281,7 +285,9 @@ RSpec.describe Gitlab::Danger::Roulette do
end
it 'filters team by project_name' do
- is_expected.to contain_exactly(matching_teammate(software_engineer_in_test))
+ is_expected.to match_teammates([
+ software_engineer_in_test
+ ])
end
end
@@ -289,32 +295,35 @@ RSpec.describe Gitlab::Danger::Roulette do
let(:person_tz_offset_hours) { 0.0 }
let(:person1) do
Gitlab::Danger::Teammate.new(
- 'username' => 'rymai',
+ 'username' => 'user1',
'available' => true,
'tz_offset_hours' => person_tz_offset_hours
)
end
+
let(:person2) do
Gitlab::Danger::Teammate.new(
- 'username' => 'godfat',
+ 'username' => 'user2',
'available' => true,
'tz_offset_hours' => person_tz_offset_hours)
end
+
let(:author) do
Gitlab::Danger::Teammate.new(
- 'username' => 'filipa',
+ 'username' => 'johndoe',
'available' => true,
'tz_offset_hours' => 0.0)
end
+
let(:unavailable) do
Gitlab::Danger::Teammate.new(
- 'username' => 'jacopo-beschi',
+ 'username' => 'janedoe',
'available' => false,
'tz_offset_hours' => 0.0)
end
before do
- allow(subject).to receive_message_chain(:gitlab, :mr_author).and_return(author.username)
+ allow(subject).to receive(:mr_author_username).and_return(author.username)
end
(-4..4).each do |utc_offset|
@@ -328,7 +337,7 @@ RSpec.describe Gitlab::Danger::Roulette do
selected = subject.spin_for_person(persons, random: Random.new, timezone_experiment: timezone_experiment)
- expect(selected.username).to be_in(persons.map(&:username))
+ expect(persons.map(&:username)).to include(selected.username)
end
end
end
@@ -349,7 +358,7 @@ RSpec.describe Gitlab::Danger::Roulette do
if timezone_experiment
expect(selected).to be_nil
else
- expect(selected.username).to be_in(persons.map(&:username))
+ expect(persons.map(&:username)).to include(selected.username)
end
end
end
diff --git a/spec/lib/gitlab/danger/teammate_spec.rb b/spec/lib/gitlab/danger/teammate_spec.rb
index a0540a9fbf5..12819614fab 100644
--- a/spec/lib/gitlab/danger/teammate_spec.rb
+++ b/spec/lib/gitlab/danger/teammate_spec.rb
@@ -1,7 +1,5 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-
require 'timecop'
require 'rspec-parameterized'
@@ -10,24 +8,45 @@ require 'gitlab/danger/teammate'
RSpec.describe Gitlab::Danger::Teammate do
using RSpec::Parameterized::TableSyntax
- subject { described_class.new(options.stringify_keys) }
+ subject { described_class.new(options) }
let(:tz_offset_hours) { 2.0 }
let(:options) do
{
- username: 'luigi',
- projects: projects,
- role: role,
- markdown_name: '[Luigi](https://gitlab.com/luigi) (`@luigi`)',
- tz_offset_hours: tz_offset_hours
+ 'username' => 'luigi',
+ 'projects' => projects,
+ 'role' => role,
+ 'markdown_name' => '[Luigi](https://gitlab.com/luigi) (`@luigi`)',
+ 'tz_offset_hours' => tz_offset_hours
}
end
+
let(:capabilities) { ['reviewer backend'] }
let(:projects) { { project => capabilities } }
let(:role) { 'Engineer, Manage' }
let(:labels) { [] }
let(:project) { double }
+ describe '#==' do
+ it 'compares Teammate username' do
+ joe1 = described_class.new('username' => 'joe', 'projects' => projects)
+ joe2 = described_class.new('username' => 'joe', 'projects' => [])
+ jane1 = described_class.new('username' => 'jane', 'projects' => projects)
+ jane2 = described_class.new('username' => 'jane', 'projects' => [])
+
+ expect(joe1).to eq(joe2)
+ expect(jane1).to eq(jane2)
+ expect(jane1).not_to eq(nil)
+ expect(described_class.new('username' => nil)).not_to eq(nil)
+ end
+ end
+
+ describe '#to_h' do
+ it 'returns the given options' do
+ expect(subject.to_h).to eq(options)
+ end
+ end
+
context 'when having multiple capabilities' do
let(:capabilities) { ['reviewer backend', 'maintainer frontend', 'trainee_maintainer qa'] }
@@ -153,44 +172,44 @@ RSpec.describe Gitlab::Danger::Teammate do
describe '#markdown_name' do
context 'when timezone_experiment == false' do
it 'returns markdown name as-is' do
- expect(subject.markdown_name).to eq(options[:markdown_name])
- expect(subject.markdown_name(timezone_experiment: false)).to eq(options[:markdown_name])
+ expect(subject.markdown_name).to eq(options['markdown_name'])
+ expect(subject.markdown_name(timezone_experiment: false)).to eq(options['markdown_name'])
end
end
context 'when timezone_experiment == true' do
it 'returns markdown name with timezone info' do
- expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options[:markdown_name]} (UTC+2)")
+ expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options['markdown_name']} (UTC+2)")
end
context 'when offset is 1.5' do
let(:tz_offset_hours) { 1.5 }
it 'returns markdown name with timezone info, not truncated' do
- expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options[:markdown_name]} (UTC+1.5)")
+ expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options['markdown_name']} (UTC+1.5)")
end
end
context 'when author is given' do
where(:tz_offset_hours, :author_offset, :diff_text) do
-12 | -10 | "2 hours behind `@mario`"
- -10 | -12 | "2 hours ahead `@mario`"
+ -10 | -12 | "2 hours ahead of `@mario`"
-10 | 2 | "12 hours behind `@mario`"
2 | 4 | "2 hours behind `@mario`"
- 4 | 2 | "2 hours ahead `@mario`"
+ 4 | 2 | "2 hours ahead of `@mario`"
2 | 3 | "1 hour behind `@mario`"
- 3 | 2 | "1 hour ahead `@mario`"
+ 3 | 2 | "1 hour ahead of `@mario`"
2 | 2 | "same timezone as `@mario`"
end
with_them do
it 'returns markdown name with timezone info' do
- author = described_class.new(options.merge(username: 'mario', tz_offset_hours: author_offset).stringify_keys)
+ author = described_class.new(options.merge('username' => 'mario', 'tz_offset_hours' => author_offset))
floored_offset_hours = subject.__send__(:floored_offset_hours)
utc_offset = floored_offset_hours >= 0 ? "+#{floored_offset_hours}" : floored_offset_hours
- expect(subject.markdown_name(timezone_experiment: true, author: author)).to eq("#{options[:markdown_name]} (UTC#{utc_offset}, #{diff_text})")
+ expect(subject.markdown_name(timezone_experiment: true, author: author)).to eq("#{options['markdown_name']} (UTC#{utc_offset}, #{diff_text})")
end
end
end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 656501dbf56..1f84a915cdc 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -13,11 +13,34 @@ RSpec.describe Gitlab::Database::BatchCount do
let(:another_user) { create(:user) }
before do
- create_list(:issue, 3, author: user )
- create_list(:issue, 2, author: another_user )
+ create_list(:issue, 3, author: user)
+ create_list(:issue, 2, author: another_user)
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction)
end
+ shared_examples 'disallowed configurations' do |method|
+ it 'returns fallback if start is bigger than finish' do
+ expect(described_class.public_send(method, *args, start: 1, finish: 0)).to eq(fallback)
+ end
+
+ it 'returns fallback if loops more than allowed' do
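+      # A finish value more than MAX_ALLOWED_LOOPS batches beyond start makes the counter bail out and return the fallback.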
+ large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * default_batch_size + 1
+ expect(described_class.public_send(method, *args, start: 1, finish: large_finish)).to eq(fallback)
+ end
+
+ it 'returns fallback if batch size is less than min required' do
+ expect(described_class.public_send(method, *args, batch_size: small_batch_size)).to eq(fallback)
+ end
+ end
+
+ shared_examples 'when a transaction is open' do
+ let(:in_transaction) { true }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('BatchCount can not be run inside a transaction')
+ end
+ end
+
describe '#batch_count' do
it 'counts table' do
expect(described_class.batch_count(model)).to eq(5)
@@ -53,38 +76,32 @@ RSpec.describe Gitlab::Database::BatchCount do
[1, 2, 4, 5, 6].each { |i| expect(described_class.batch_count(model, batch_size: i)).to eq(5) }
end
- it 'will raise an error if distinct count is requested' do
- expect do
- described_class.batch_count(model.distinct(column))
- end.to raise_error 'Use distinct count for optimized distinct counting'
+ it 'counts with a start and finish' do
+ expect(described_class.batch_count(model, start: model.minimum(:id), finish: model.maximum(:id))).to eq(5)
end
- context 'in a transaction' do
- let(:in_transaction) { true }
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
- it 'cannot count' do
- expect do
- described_class.batch_count(model)
- end.to raise_error 'BatchCount can not be run inside a transaction'
+ expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
+ expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + min_id, :itself).once.and_call_original
end
- end
- it 'counts with a start and finish' do
- expect(described_class.batch_count(model, start: model.minimum(:id), finish: model.maximum(:id))).to eq(5)
+ described_class.batch_count(model)
end
- context 'disallowed configurations' do
- it 'returns fallback if start is bigger than finish' do
- expect(described_class.batch_count(model, start: 1, finish: 0)).to eq(fallback)
- end
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_count(model) }
+ end
- it 'returns fallback if loops more than allowed' do
- large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE + 1
- expect(described_class.batch_count(model, start: 1, finish: large_finish)).to eq(fallback)
+    context 'disallowed configurations' do
+ include_examples 'disallowed configurations', :batch_count do
+ let(:args) { [Issue] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_BATCH_SIZE }
end
- it 'returns fallback if batch size is less than min required' do
- expect(described_class.batch_count(model, batch_size: small_batch_size)).to eq(fallback)
+ it 'raises an error if distinct count is requested' do
+ expect { described_class.batch_count(model.distinct(column)) }.to raise_error 'Use distinct count for optimized distinct counting'
end
end
end
@@ -128,18 +145,24 @@ RSpec.describe Gitlab::Database::BatchCount do
expect(described_class.batch_distinct_count(model, column, start: User.minimum(:id), finish: User.maximum(:id))).to eq(2)
end
- context 'disallowed configurations' do
- it 'returns fallback if start is bigger than finish' do
- expect(described_class.batch_distinct_count(model, column, start: 1, finish: 0)).to eq(fallback)
- end
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
- it 'returns fallback if loops more than allowed' do
- large_finish = Gitlab::Database::BatchCounter::MAX_ALLOWED_LOOPS * Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + 1
- expect(described_class.batch_distinct_count(model, column, start: 1, finish: large_finish)).to eq(fallback)
+ expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
+ expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE + min_id, :distinct).once.and_call_original
end
- it 'returns fallback if batch size is less than min required' do
- expect(described_class.batch_distinct_count(model, column, batch_size: small_batch_size)).to eq(fallback)
+ described_class.batch_distinct_count(model)
+ end
+
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_distinct_count(model, column) }
+ end
+
+ context 'disallowed configurations' do
+ include_examples 'disallowed configurations', :batch_distinct_count do
+ let(:args) { [model, column] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE }
end
it 'will raise an error if distinct count with the :id column is requested' do
@@ -149,4 +172,55 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
end
+
+ describe '#batch_sum' do
+ let(:column) { :weight }
+
+ before do
+ Issue.first.update_attribute(column, 3)
+ Issue.last.update_attribute(column, 4)
+ end
+
+ it 'returns the sum of values in the given column' do
+ expect(described_class.batch_sum(model, column)).to eq(7)
+ end
+
+ it 'works when given an Arel column' do
+ expect(described_class.batch_sum(model, model.arel_table[column])).to eq(7)
+ end
+
+ it 'works with a batch size of 50K' do
+ expect(described_class.batch_sum(model, column, batch_size: 50_000)).to eq(7)
+ end
+
+ it 'works with start and finish provided' do
+ expect(described_class.batch_sum(model, column, start: model.minimum(:id), finish: model.maximum(:id))).to eq(7)
+ end
+
+ it 'returns the same result regardless of batch size' do
+ stub_const('Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE', 0)
+
+ (1..(model.count + 1)).each { |i| expect(described_class.batch_sum(model, column, batch_size: i)).to eq(7) }
+ end
+
+ it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE}" do
+ min_id = model.minimum(:id)
+
+ expect_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter|
+ expect(batch_counter).to receive(:batch_fetch).with(min_id, Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE + min_id, :itself).once.and_call_original
+ end
+
+ described_class.batch_sum(model, column)
+ end
+
+ it_behaves_like 'when a transaction is open' do
+ subject { described_class.batch_sum(model, column) }
+ end
+
+ it_behaves_like 'disallowed configurations', :batch_sum do
+ let(:args) { [model, column] }
+ let(:default_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE }
+ let(:small_batch_size) { Gitlab::Database::BatchCounter::DEFAULT_SUM_BATCH_SIZE - 1 }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
index e488bf5ee4c..c2028f8c238 100644
--- a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
+++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do
Namespace => threshold + 1
}
end
+
let(:threshold) { Gitlab::Database::Count::TablesampleCountStrategy::EXACT_COUNT_THRESHOLD }
before do
diff --git a/spec/lib/gitlab/database/custom_structure_spec.rb b/spec/lib/gitlab/database/custom_structure_spec.rb
index beda9df3684..b3bdca0acdd 100644
--- a/spec/lib/gitlab/database/custom_structure_spec.rb
+++ b/spec/lib/gitlab/database/custom_structure_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Gitlab::Database::CustomStructure do
Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
cascade_delete: true, from_table: 'issues', from_column: 'project_id', to_table: 'projects', to_column: 'id')
end
+
let!(:second_fk) do
Gitlab::Database::PartitioningMigrationHelpers::PartitionedForeignKey.create(
cascade_delete: false, from_table: 'issues', from_column: 'moved_to_id', to_table: 'issues', to_column: 'id')
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 48e1c97e97f..4b7f371b25a 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -712,7 +712,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:add_not_null_constraint).with(:users, :new)
expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}")
expect(model).to receive(:execute).with("DROP TRIGGER IF EXISTS #{trigger_name}\nON \"users\"\n")
- expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE PROCEDURE #{trigger_name}()\n")
+ expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE FUNCTION #{trigger_name}()\n")
expect(model).to receive(:execute).with("CREATE OR REPLACE FUNCTION #{trigger_name}()\nRETURNS trigger AS\n$BODY$\nBEGIN\n NEW.\"new\" := NEW.\"id\";\n RETURN NEW;\nEND;\n$BODY$\nLANGUAGE 'plpgsql'\nVOLATILE\n")
model.rename_column_concurrently(:users, :id, :new, type_cast_function: 'cast_to_jsonb_with_default')
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
index efa9c83b2d2..7d88c17c9b3 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/foreign_key_helpers_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::ForeignKeyHelpers
let(:model) do
ActiveRecord::Migration.new.extend(described_class)
end
+
let_it_be(:connection) { ActiveRecord::Base.connection }
let(:referenced_table) { :issues }
let(:function_name) { '_test_partitioned_foreign_keys_function' }
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index 9b24ab7cad4..86f79b213ae 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -315,42 +315,13 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(model.find(second_todo.id).attributes).to eq(second_todo.attributes)
end
end
-
- describe 'copying historic data to the partitioned table' do
- let(:source_table) { 'todos' }
- let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
- let(:sub_batch_size) { described_class::SUB_BATCH_SIZE }
- let(:pause_seconds) { described_class::PAUSE_SECONDS }
- let!(:first_id) { create(:todo).id }
- let!(:second_id) { create(:todo).id }
- let!(:third_id) { create(:todo).id }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
-
- expect(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
- end
-
- it 'enqueues jobs to copy each batch of data' do
- Sidekiq::Testing.fake! do
- migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
-
- first_job_arguments = [first_id, second_id, source_table, partitioned_table, 'id']
- expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments])
-
- second_job_arguments = [third_id, third_id, source_table, partitioned_table, 'id']
- expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments])
- end
- end
- end
end
describe '#drop_partitioned_table_for' do
let(:expected_tables) do
%w[000000 201912 202001 202002].map { |suffix| "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{partitioned_table}_#{suffix}" }.unshift(partitioned_table)
end
+
let(:migration_class) { 'Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
context 'when the table is not allowed' do
@@ -390,16 +361,85 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
expect(connection.table_exists?(table)).to be(false)
end
end
+ end
+
+ describe '#enqueue_partitioning_data_migration' do
+ context 'when the table is not allowed' do
+ let(:source_table) { :this_table_is_not_allowed }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original
+
+ expect do
+ migration.enqueue_partitioning_data_migration source_table
+ end.to raise_error(/#{source_table} is not allowed for use/)
+ end
+ end
- context 'cleaning up background migration tracking records' do
+ context 'when run inside a transaction block' do
+ it 'raises an error' do
+ expect(migration).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ migration.enqueue_partitioning_data_migration source_table
+ end.to raise_error(/can not be run inside a transaction/)
+ end
+ end
+
+ context 'when records exist in the source table' do
+ let(:source_table) { 'todos' }
+ let(:migration_class) { '::Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
+ let(:sub_batch_size) { described_class::SUB_BATCH_SIZE }
+ let(:pause_seconds) { described_class::PAUSE_SECONDS }
+ let!(:first_id) { create(:todo).id }
+ let!(:second_id) { create(:todo).id }
+ let!(:third_id) { create(:todo).id }
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ expect(migration).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
+ end
+
+ it 'enqueues jobs to copy each batch of data' do
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+
+ Sidekiq::Testing.fake! do
+ migration.enqueue_partitioning_data_migration source_table
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+
+ first_job_arguments = [first_id, second_id, source_table, partitioned_table, 'id']
+ expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([migration_class, first_job_arguments])
+
+ second_job_arguments = [third_id, third_id, source_table, partitioned_table, 'id']
+ expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([migration_class, second_job_arguments])
+ end
+ end
+ end
+ end
+
+ describe '#cleanup_partitioning_data_migration' do
+ context 'when the table is not allowed' do
+ let(:source_table) { :this_table_is_not_allowed }
+
+ it 'raises an error' do
+ expect(migration).to receive(:assert_table_is_allowed).with(source_table).and_call_original
+
+ expect do
+ migration.cleanup_partitioning_data_migration source_table
+ end.to raise_error(/#{source_table} is not allowed for use/)
+ end
+ end
+
+ context 'when tracking records exist in the background_migration_jobs table' do
+ let(:migration_class) { 'Gitlab::Database::PartitioningMigrationHelpers::BackfillPartitionedTable' }
let!(:job1) { create(:background_migration_job, class_name: migration_class, arguments: [1, 10, source_table]) }
let!(:job2) { create(:background_migration_job, class_name: migration_class, arguments: [11, 20, source_table]) }
let!(:job3) { create(:background_migration_job, class_name: migration_class, arguments: [1, 10, 'other_table']) }
- it 'deletes any tracking records from the background_migration_jobs table' do
- migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
-
- expect { migration.drop_partitioned_table_for(source_table) }
+ it 'deletes those pertaining to the given table' do
+ expect { migration.cleanup_partitioning_data_migration(source_table) }
.to change { ::Gitlab::Database::BackgroundMigrationJob.count }.from(3).to(1)
remaining_record = ::Gitlab::Database::BackgroundMigrationJob.first
diff --git a/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
new file mode 100644
index 00000000000..ca9f4af9187
--- /dev/null
+++ b/spec/lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PostgresqlAdapter::DumpSchemaVersionsMixin do
+ let(:schema_migration) { double('schema_migration', all_versions: versions) }
+
+ let(:instance) do
+ Object.new.extend(described_class)
+ end
+
+ before do
+ allow(instance).to receive(:schema_migration).and_return(schema_migration)
+ end
+
+ context 'when version files exist' do
+ let(:versions) { %w(5 2 1000 200 4 93 2) }
+
+ it 'touches version files' do
+ expect(Gitlab::Database::SchemaVersionFiles).to receive(:touch_all).with(versions)
+
+ instance.dump_schema_information
+ end
+ end
+
+ context 'when version files do not exist' do
+ let(:versions) { [] }
+
+ it 'does not touch version files' do
+ expect(Gitlab::Database::SchemaVersionFiles).not_to receive(:touch_all)
+
+ instance.dump_schema_information
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
index 8b3a0ceb804..ea8c9e2cfd7 100644
--- a/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
+++ b/spec/lib/gitlab/database/postgresql_adapter/force_disconnectable_mixin_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::Database::PostgresqlAdapter::ForceDisconnectableMixin do
end
end
end
+
let(:config) { Rails.application.config_for(:database).merge(pool: 1) }
let(:pool) { model.establish_connection(config) }
diff --git a/spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb b/spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb
deleted file mode 100644
index c6333e4a4dc..00000000000
--- a/spec/lib/gitlab/database/postgresql_adapter/schema_versions_copy_mixin_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::PostgresqlAdapter::SchemaVersionsCopyMixin do
- let(:schema_migration) { double('schem_migration', table_name: table_name, all_versions: versions) }
- let(:versions) { %w(5 2 1000 200 4 93 2) }
- let(:table_name) { "schema_migrations" }
-
- let(:instance) do
- Object.new.extend(described_class)
- end
-
- before do
- allow(instance).to receive(:schema_migration).and_return(schema_migration)
- allow(instance).to receive(:quote_table_name).with(table_name).and_return("\"#{table_name}\"")
- end
-
- subject { instance.dump_schema_information }
-
- it 'uses COPY FROM STDIN' do
- expect(subject.split("\n").first).to match(/COPY "schema_migrations" \(version\) FROM STDIN;/)
- end
-
- it 'contains a sorted list of versions by their numeric value' do
- version_lines = subject.split("\n")[1..-2].map(&:to_i)
-
- expect(version_lines).to eq(versions.map(&:to_i).sort)
- end
-
- it 'contains a end-of-data marker' do
- expect(subject).to end_with("\\.\n")
- end
-
- context 'with non-Integer versions' do
- let(:versions) { %w(5 2 4 abc) }
-
- it 'raises an error' do
- expect { subject }.to raise_error(/invalid value for Integer/)
- end
- end
-end
diff --git a/spec/lib/gitlab/database/schema_version_files_spec.rb b/spec/lib/gitlab/database/schema_version_files_spec.rb
new file mode 100644
index 00000000000..c3b3ae0a07f
--- /dev/null
+++ b/spec/lib/gitlab/database/schema_version_files_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SchemaVersionFiles do
+ describe '.touch_all' do
+ let(:version1) { '20200123' }
+ let(:version2) { '20200410' }
+ let(:version3) { '20200602' }
+ let(:version4) { '20200809' }
+ let(:relative_schema_directory) { 'db/schema_migrations' }
+ let(:relative_migrate_directory) { 'db/migrate' }
+ let(:relative_post_migrate_directory) { 'db/post_migrate' }
+
+ it 'creates a file containing a checksum for each version with a matching migration' do
+ Dir.mktmpdir do |tmpdir|
+ schema_directory = Pathname.new(tmpdir).join(relative_schema_directory)
+ migrate_directory = Pathname.new(tmpdir).join(relative_migrate_directory)
+ post_migrate_directory = Pathname.new(tmpdir).join(relative_post_migrate_directory)
+
+ FileUtils.mkdir_p(migrate_directory)
+ FileUtils.mkdir_p(post_migrate_directory)
+ FileUtils.mkdir_p(schema_directory)
+
+ migration1_filepath = migrate_directory.join("#{version1}_migration.rb")
+ FileUtils.touch(migration1_filepath)
+
+ migration2_filepath = post_migrate_directory.join("#{version2}_post_migration.rb")
+ FileUtils.touch(migration2_filepath)
+
+ old_version_filepath = schema_directory.join('20200101')
+ FileUtils.touch(old_version_filepath)
+
+ expect(File.exist?(old_version_filepath)).to be(true)
+
+ allow(described_class).to receive(:schema_directory).and_return(schema_directory)
+ allow(described_class).to receive(:migration_directories).and_return([migrate_directory, post_migrate_directory])
+
+ described_class.touch_all([version1, version2, version3, version4])
+
+ expect(File.exist?(old_version_filepath)).to be(false)
+ [version1, version2].each do |version|
+ version_filepath = schema_directory.join(version)
+ expect(File.exist?(version_filepath)).to be(true)
+
+ hashed_value = Digest::SHA256.hexdigest(version)
+ expect(File.read(version_filepath)).to eq(hashed_value)
+ end
+
+ [version3, version4].each do |version|
+ version_filepath = schema_directory.join(version)
+ expect(File.exist?(version_filepath)).to be(false)
+ end
+ end
+ end
+ end
+
+ describe '.load_all' do
+ let(:connection) { double('connection') }
+
+ before do
+ allow(described_class).to receive(:connection).and_return(connection)
+ allow(described_class).to receive(:find_version_filenames).and_return(filenames)
+ end
+
+ context 'when there are no version files' do
+ let(:filenames) { [] }
+
+ it 'does nothing' do
+ expect(connection).not_to receive(:quote_string)
+ expect(connection).not_to receive(:execute)
+
+ described_class.load_all
+ end
+ end
+
+ context 'when there are version files' do
+ let(:filenames) { %w[123 456 789] }
+
+ it 'inserts the missing versions into schema_migrations' do
+ filenames.each do |filename|
+ expect(connection).to receive(:quote_string).with(filename).and_return(filename)
+ end
+
+ expect(connection).to receive(:execute).with(<<~SQL)
+ INSERT INTO schema_migrations (version)
+ VALUES ('123'),('456'),('789')
+ ON CONFLICT DO NOTHING
+ SQL
+
+ described_class.load_all
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/similarity_score_spec.rb b/spec/lib/gitlab/database/similarity_score_spec.rb
new file mode 100644
index 00000000000..e36a4f610e1
--- /dev/null
+++ b/spec/lib/gitlab/database/similarity_score_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::SimilarityScore do
+ let(:search) { '' }
+ let(:query_result) { ActiveRecord::Base.connection.execute(query).to_a }
+
+ let(:query) do
+ # In memory query, with the id as the tie breaker.
+ <<-SQL
+ SELECT *, #{order_expression} AS similarity
+ FROM (
+ VALUES (1, 'Git', 'git', 'git source code mirror. this is a publish-only repository.'),
+ (2, 'GitLab Runner', 'gitlab-runner', 'official helm chart for the gitlab runner'),
+ (3, 'gitaly', 'gitaly', 'gitaly is a git rpc service for handling all the git calls made by gitlab'),
+ (4, 'GitLab', 'gitlab', 'gitlab is an open source end-to-end software development platform with built-in version control'),
+ (5, 'Gitlab Danger', 'gitlab-danger', 'this gem provides common dangerfile and plugins for gitlab projects'),
+ (6, 'different', 'same', 'same'),
+ (7, 'same', 'different', 'same'),
+ (8, 'gitlab-styles', 'gitlab-styles', 'gitlab style guides and shared style configs.'),
+ (9, '🔒 gitaly', 'gitaly-sec', 'security mirror for gitaly')
+      ) tbl (id, name, path, description) ORDER BY #{order_expression} DESC, id DESC;
+ SQL
+ end
+
+ let(:order_expression) do
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [{ column: Arel.sql('path') }]).to_sql
+ end
+
+ subject { query_result.take(3).map { |row| row['path'] } }
+
+ context 'when passing empty values' do
+ context 'when search is nil' do
+ let(:search) { nil }
+
+ it 'orders by a constant 0 value' do
+ expect(query).to include('ORDER BY CAST(0 AS integer) DESC')
+ end
+ end
+
+ context 'when rules are empty' do
+ let(:search) { 'text' }
+
+ let(:order_expression) do
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules: []).to_sql
+ end
+
+ it 'orders by a constant 0 value' do
+ expect(query).to include('ORDER BY CAST(0 AS integer) DESC')
+ end
+ end
+ end
+
+ context 'when similarity scoring based on the path' do
+ let(:search) { 'git' }
+
+ context 'when searching for `git`' do
+ let(:search) { 'git' }
+
+ it { expect(subject).to eq(%w[git gitlab gitaly]) }
+ end
+
+ context 'when searching for `gitlab`' do
+ let(:search) { 'gitlab' }
+
+ it { expect(subject).to eq(%w[gitlab gitlab-styles gitlab-danger]) }
+ end
+
+ context 'when searching for something unrelated' do
+ let(:search) { 'xyz' }
+
+ it 'results have 0 similarity score' do
+ expect(query_result.map { |row| row['similarity'] }).to all(eq(0))
+ end
+ end
+ end
+
+ describe 'score multiplier' do
+ let(:order_expression) do
+ Gitlab::Database::SimilarityScore.build_expression(search: search, rules: [
+ { column: Arel.sql('path'), multiplier: 1 },
+ { column: Arel.sql('name'), multiplier: 0.8 }
+ ]).to_sql
+ end
+
+ let(:search) { 'different' }
+
+ it 'ranks `path` matches higher' do
+ expect(subject).to eq(%w[different same gitlab-danger])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 70cbddbb7b7..2cc6e175500 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -72,9 +72,14 @@ RSpec.describe Gitlab::Database::WithLockRetries do
lock_attempts = 0
lock_acquired = false
- expect_any_instance_of(Gitlab::Database::WithLockRetries).to receive(:sleep).exactly(retry_count - 1).times # we don't sleep in the last iteration
-
- allow_any_instance_of(Gitlab::Database::WithLockRetries).to receive(:run_block_with_transaction).and_wrap_original do |method|
+      # the actual number of attempts to run_block_with_transaction can never exceed the number of
+      # timing configurations, so we cap the retry count at that value
+      #
+      # also, there is no call to sleep after the final attempt, which is why the sleep count is always one less
+ expected_runs_with_timeout = [retry_count, timing_configuration.size].min
+ expect(subject).to receive(:sleep).exactly(expected_runs_with_timeout - 1).times
+
+ expect(subject).to receive(:run_block_with_transaction).exactly(expected_runs_with_timeout).times.and_wrap_original do |method|
lock_fiber.resume if lock_attempts == retry_count
method.call
@@ -114,6 +119,33 @@ RSpec.describe Gitlab::Database::WithLockRetries do
end
end
+ context 'after the retries, when requested to raise an error' do
+ let(:expected_attempts_with_timeout) { timing_configuration.size }
+ let(:retry_count) { timing_configuration.size + 1 }
+
+ it 'raises an error instead of waiting indefinitely for the lock' do
+ lock_attempts = 0
+ lock_acquired = false
+
+ expect(subject).to receive(:sleep).exactly(expected_attempts_with_timeout - 1).times
+ expect(subject).to receive(:run_block_with_transaction).exactly(expected_attempts_with_timeout).times.and_call_original
+
+ expect do
+ subject.run(raise_on_exhaustion: true) do
+ lock_attempts += 1
+
+ ActiveRecord::Base.transaction do
+ ActiveRecord::Base.connection.execute("LOCK TABLE #{Project.table_name} in exclusive mode")
+ lock_acquired = true
+ end
+ end
+ end.to raise_error(described_class::AttemptsExhaustedError)
+
+ expect(lock_attempts).to eq(retry_count - 1)
+ expect(lock_acquired).to eq(false)
+ end
+ end
+
context 'when statement timeout is reached' do
it 'raises QueryCanceled error' do
lock_acquired = false
diff --git a/spec/lib/gitlab/diff/file_collection/commit_spec.rb b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
index 6c109e96a53..7773604a638 100644
--- a/spec/lib/gitlab/diff/file_collection/commit_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/commit_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::Diff::FileCollection::Commit do
let(:collection_default_args) do
{ diff_options: {} }
end
+
let(:diffable) { project.commit }
let(:stub_path) { 'bar/branch-test.txt' }
end
diff --git a/spec/lib/gitlab/diff/file_collection/compare_spec.rb b/spec/lib/gitlab/diff/file_collection/compare_spec.rb
index 168d58e584e..dda4513a3a1 100644
--- a/spec/lib/gitlab/diff/file_collection/compare_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/compare_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe Gitlab::Diff::FileCollection::Compare do
diff_refs: diffable.diff_refs
}
end
+
let(:diffable) { Compare.new(raw_compare, project) }
let(:stub_path) { '.gitignore' }
end
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
index a5e714c90fc..429e552278d 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_spec.rb
@@ -42,6 +42,7 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiff do
let(:collection_default_args) do
{ diff_options: {} }
end
+
let(:diffable) { merge_request.merge_request_diff }
let(:stub_path) { '.gitignore' }
end
diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb
index 94abfcf079a..78be89c449b 100644
--- a/spec/lib/gitlab/diff/file_spec.rb
+++ b/spec/lib/gitlab/diff/file_spec.rb
@@ -800,6 +800,7 @@ RSpec.describe Gitlab::Diff::File do
let(:project) do
create(:project, :custom_repo, files: {})
end
+
let(:branch_name) { 'master' }
context 'when empty file is created' do
@@ -842,6 +843,7 @@ RSpec.describe Gitlab::Diff::File do
let(:project) do
create(:project, :custom_repo, files: {})
end
+
let(:branch_name) { 'master' }
context 'when empty file is created' do
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 80cc10051c4..7e926f86096 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -173,57 +173,32 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
fallback_diff_refs: diffs.fallback_diff_refs)
end
- context "feature flag :gzip_diff_cache disabled" do
- before do
- stub_feature_flags(gzip_diff_cache: true)
- end
-
- it "uses ActiveSupport::Gzip when reading from the cache" do
- expect(ActiveSupport::Gzip).to receive(:decompress).at_least(:once).and_call_original
-
- cache.write_if_empty
- cache.decorate(diff_file)
- end
+ it "uses ActiveSupport::Gzip when reading from the cache" do
+ expect(ActiveSupport::Gzip).to receive(:decompress).at_least(:once).and_call_original
- it "uses ActiveSupport::Gzip to compress data when writing to cache" do
- expect(ActiveSupport::Gzip).to receive(:compress).and_call_original
-
- cache.send(:write_to_redis_hash, diff_hash)
- end
+ cache.write_if_empty
+ cache.decorate(diff_file)
end
- context "feature flag :gzip_diff_cache disabled" do
- before do
- stub_feature_flags(gzip_diff_cache: false)
- end
-
- it "doesn't use ActiveSupport::Gzip when reading from the cache" do
- expect(ActiveSupport::Gzip).not_to receive(:decompress)
-
- cache.write_if_empty
- cache.decorate(diff_file)
- end
-
- it "doesn't use ActiveSupport::Gzip to compress data when writing to cache" do
- expect(ActiveSupport::Gzip).not_to receive(:compress)
+ it "uses ActiveSupport::Gzip to compress data when writing to cache" do
+ expect(ActiveSupport::Gzip).to receive(:compress).and_call_original
- expect { cache.send(:write_to_redis_hash, diff_hash) }
- .to change { Gitlab::Redis::Cache.with { |r| r.hgetall(cache_key) } }
- end
+ cache.send(:write_to_redis_hash, diff_hash)
end
end
describe 'metrics' do
- it 'defines :gitlab_redis_diff_caching_memory_usage_bytes histogram' do
- expect(described_class).to respond_to(:gitlab_redis_diff_caching_memory_usage_bytes)
- end
+    let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) }
- it 'defines :gitlab_redis_diff_caching_hit' do
- expect(described_class).to respond_to(:gitlab_redis_diff_caching_hit)
+ before do
+ allow(cache).to receive(:current_transaction).and_return(transaction)
end
- it 'defines :gitlab_redis_diff_caching_miss' do
- expect(described_class).to respond_to(:gitlab_redis_diff_caching_miss)
+ it 'observes :gitlab_redis_diff_caching_memory_usage_bytes' do
+ expect(transaction)
+ .to receive(:observe).with(:gitlab_redis_diff_caching_memory_usage_bytes, a_kind_of(Numeric))
+
+ cache.write_if_empty
end
end
end
diff --git a/spec/lib/gitlab/diff/position_collection_spec.rb b/spec/lib/gitlab/diff/position_collection_spec.rb
index b1478c774f1..d2bb82983c0 100644
--- a/spec/lib/gitlab/diff/position_collection_spec.rb
+++ b/spec/lib/gitlab/diff/position_collection_spec.rb
@@ -8,9 +8,11 @@ RSpec.describe Gitlab::Diff::PositionCollection do
let(:text_position) do
build(:text_diff_position, :added, diff_refs: diff_refs)
end
+
let(:folded_text_position) do
build(:text_diff_position, diff_refs: diff_refs, old_line: 1, new_line: 1)
end
+
let(:image_position) do
build(:image_diff_position, diff_refs: diff_refs)
end
diff --git a/spec/lib/gitlab/diff/stats_cache_spec.rb b/spec/lib/gitlab/diff/stats_cache_spec.rb
index 8bf510c0bdd..5b01c1913bf 100644
--- a/spec/lib/gitlab/diff/stats_cache_spec.rb
+++ b/spec/lib/gitlab/diff/stats_cache_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::Diff::StatsCache, :use_clean_rails_memory_store_caching d
let(:cachable_key) { 'cachecachecache' }
let(:stat) { Gitaly::DiffStats.new(path: 'temp', additions: 10, deletions: 15) }
let(:stats) { Gitlab::Git::DiffStatsCollection.new([stat]) }
+ let(:serialized_stats) { stats.map(&:to_h).as_json }
let(:cache) { Rails.cache }
describe '#read' do
@@ -38,7 +39,7 @@ RSpec.describe Gitlab::Diff::StatsCache, :use_clean_rails_memory_store_caching d
it 'writes the stats' do
expect(cache)
.to receive(:write)
- .with(key, stats.as_json, expires_in: described_class::EXPIRATION)
+ .with(key, serialized_stats, expires_in: described_class::EXPIRATION)
.and_call_original
stats_cache.write_if_empty(stats)
@@ -53,7 +54,7 @@ RSpec.describe Gitlab::Diff::StatsCache, :use_clean_rails_memory_store_caching d
it 'writes the stats' do
expect(cache)
.to receive(:write)
- .with(key, stats.as_json, expires_in: described_class::EXPIRATION)
+ .with(key, serialized_stats, expires_in: described_class::EXPIRATION)
.and_call_original
stats_cache.write_if_empty(stats)
@@ -81,4 +82,28 @@ RSpec.describe Gitlab::Diff::StatsCache, :use_clean_rails_memory_store_caching d
stats_cache.clear
end
end
+
+ it 'VERSION is set' do
+ expect(described_class::VERSION).to be_present
+ end
+
+ context 'with multiple cache versions' do
+ before do
+ stats_cache.write_if_empty(stats)
+ end
+
+ it 'does not read from a stale cache' do
+ expect(stats_cache.read.to_json).to eq(stats.to_json)
+
+ stub_const('Gitlab::Diff::StatsCache::VERSION', '1.0.new-new-thing')
+
+ stats_cache = described_class.new(cachable_key: cachable_key)
+
+ expect(stats_cache.read).to be_nil
+
+ stats_cache.write_if_empty(stats)
+
+ expect(stats_cache.read.to_json).to eq(stats.to_json)
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/suggestion_spec.rb b/spec/lib/gitlab/diff/suggestion_spec.rb
index 5a5c5555818..40779faf917 100644
--- a/spec/lib/gitlab/diff/suggestion_spec.rb
+++ b/spec/lib/gitlab/diff/suggestion_spec.rb
@@ -31,9 +31,11 @@ RSpec.describe Gitlab::Diff::Suggestion do
new_line: 9,
diff_refs: merge_request.diff_refs)
end
+
let(:diff_file) do
position.diff_file(project.repository)
end
+
let(:text) { "# parsed suggestion content\n# with comments" }
def blob_lines_data(from_line, to_line)
diff --git a/spec/lib/gitlab/email/message/repository_push_spec.rb b/spec/lib/gitlab/email/message/repository_push_spec.rb
index 10586527239..6b1f03e0385 100644
--- a/spec/lib/gitlab/email/message/repository_push_spec.rb
+++ b/spec/lib/gitlab/email/message/repository_push_spec.rb
@@ -18,10 +18,12 @@ RSpec.describe Gitlab::Email::Message::RepositoryPush do
{ author_id: author.id, ref: 'master', action: :push, compare: compare,
send_from_committer_email: true }
end
+
let(:raw_compare) do
Gitlab::Git::Compare.new(project.repository.raw_repository,
sample_image_commit.id, sample_commit.id)
end
+
let(:compare) do
Compare.decorate(raw_compare, project)
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 5394c04c6ba..0ea974921bc 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -138,6 +138,7 @@ RSpec.describe Gitlab::EncodingHelper do
let(:test_string) do
"refs/heads/FixSymbolsTitleDropdown".encode("ASCII-8BIT")
end
+
let(:expected_string) do
"refs/heads/FixSymbolsTitleDropdown".encode("UTF-8")
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index a6408aeae8b..2de5e1e20d6 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -233,6 +233,68 @@ RSpec.describe Gitlab::Experimentation do
end
end
end
+
+ describe '#record_experiment_user' do
+ let(:user) { build(:user) }
+
+ context 'when the experiment is enabled' do
+ before do
+ stub_experiment(test_experiment: true)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ context 'the user is part of the experimental group' do
+ before do
+ stub_experiment_for_user(test_experiment: true)
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :experimental, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'the user is part of the control group' do
+ before do
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:experiment_enabled?).with(:test_experiment).and_return(false)
+ end
+ end
+
+ it 'calls add_user on the Experiment model' do
+ expect(::Experiment).to receive(:add_user).with(:test_experiment, :control, user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
+
+ context 'when the experiment is disabled' do
+ before do
+ stub_experiment(test_experiment: false)
+ allow(controller).to receive(:current_user).and_return(user)
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+
+ context 'when there is no current_user' do
+ before do
+ stub_experiment(test_experiment: true)
+ end
+
+ it 'does not call add_user on the Experiment model' do
+ expect(::Experiment).not_to receive(:add_user)
+
+ controller.record_experiment_user(:test_experiment)
+ end
+ end
+ end
end
describe '.enabled?' do
diff --git a/spec/lib/gitlab/external_authorization/client_spec.rb b/spec/lib/gitlab/external_authorization/client_spec.rb
index 473b57441fa..c08da382486 100644
--- a/spec/lib/gitlab/external_authorization/client_spec.rb
+++ b/spec/lib/gitlab/external_authorization/client_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
describe '#request_access' do
it 'performs requests to the configured endpoint' do
- expect(Excon).to receive(:post).with(dummy_url, any_args)
+ expect(Gitlab::HTTP).to receive(:post).with(dummy_url, any_args)
client.request_access
end
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
project_classification_label: 'dummy_label',
identities: []
}.to_json
- expect(Excon).to receive(:post)
+ expect(Gitlab::HTTP).to receive(:post)
.with(dummy_url, hash_including(body: expected_body))
client.request_access
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
external_authorization_service_timeout: 3
)
- expect(Excon).to receive(:post).with(dummy_url,
+ expect(Gitlab::HTTP).to receive(:post).with(dummy_url,
hash_including(
connect_timeout: 3,
read_timeout: 3,
@@ -58,25 +58,33 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
client_key_pass: 'open sesame'
}
- expect(Excon).to receive(:post).with(dummy_url, hash_including(expected_params))
+ expect(Gitlab::HTTP).to receive(:post).with(dummy_url, hash_including(expected_params))
client.request_access
end
it 'returns an expected response' do
- expect(Excon).to receive(:post)
+ expect(Gitlab::HTTP).to receive(:post)
expect(client.request_access)
.to be_kind_of(::Gitlab::ExternalAuthorization::Response)
end
it 'wraps exceptions if the request fails' do
- expect(Excon).to receive(:post) { raise Excon::Error.new('the request broke') }
+ expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError.new('the request broke') }
expect { client.request_access }
.to raise_error(::Gitlab::ExternalAuthorization::RequestFailed)
end
+ it 'passes local request setting to Gitlab::HTTP' do
+ stub_application_setting(allow_local_requests_from_system_hooks: false)
+
+ expect(Gitlab::HTTP).to receive(:post).with(dummy_url, hash_including(allow_local_requests: false))
+
+ client.request_access
+ end
+
describe 'for ldap users' do
let(:user) do
create(:omniauth_user,
@@ -92,7 +100,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
identities: [{ provider: 'ldapprovider', extern_uid: 'external id' }],
user_ldap_dn: 'external id'
}.to_json
- expect(Excon).to receive(:post)
+ expect(Gitlab::HTTP).to receive(:post)
.with(dummy_url, hash_including(body: expected_body))
client.request_access
@@ -115,7 +123,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
{ provider: 'facebook', extern_uid: 'facebook_external_id' }
]
}.to_json
- expect(Excon).to receive(:post)
+ expect(Gitlab::HTTP).to receive(:post)
.with(dummy_url, hash_including(body: expected_body))
client.request_access
diff --git a/spec/lib/gitlab/external_authorization/response_spec.rb b/spec/lib/gitlab/external_authorization/response_spec.rb
index 11f83feb76f..716196e0aa2 100644
--- a/spec/lib/gitlab/external_authorization/response_spec.rb
+++ b/spec/lib/gitlab/external_authorization/response_spec.rb
@@ -3,21 +3,21 @@
require 'spec_helper'
RSpec.describe Gitlab::ExternalAuthorization::Response do
- let(:excon_response) { double }
+ let(:http_response) { double }
- subject(:response) { described_class.new(excon_response) }
+ subject(:response) { described_class.new(http_response) }
describe '#valid?' do
it 'is valid for 200, 401, and 403 responses' do
- [200, 401, 403].each do |status|
- allow(excon_response).to receive(:status).and_return(status)
+ [200, 401, 403].each do |code|
+ allow(http_response).to receive(:code).and_return(code)
expect(response).to be_valid
end
end
it "is invalid for other statuses" do
- expect(excon_response).to receive(:status).and_return(500)
+ expect(http_response).to receive(:code).and_return(500)
expect(response).not_to be_valid
end
@@ -25,13 +25,13 @@ RSpec.describe Gitlab::ExternalAuthorization::Response do
describe '#reason' do
it 'returns a reason if it was included in the response body' do
- expect(excon_response).to receive(:body).and_return({ reason: 'Not authorized' }.to_json)
+ expect(http_response).to receive(:body).and_return({ reason: 'Not authorized' }.to_json)
expect(response.reason).to eq('Not authorized')
end
it 'returns nil when there was no body' do
- expect(excon_response).to receive(:body).and_return('')
+ expect(http_response).to receive(:body).and_return('')
expect(response.reason).to eq(nil)
end
@@ -39,14 +39,14 @@ RSpec.describe Gitlab::ExternalAuthorization::Response do
describe '#successful?' do
it 'is `true` if the status is 200' do
- allow(excon_response).to receive(:status).and_return(200)
+ allow(http_response).to receive(:code).and_return(200)
expect(response).to be_successful
end
it 'is `false` if the status is 401 or 403' do
- [401, 403].each do |status|
- allow(excon_response).to receive(:status).and_return(status)
+ [401, 403].each do |code|
+ allow(http_response).to receive(:code).and_return(code)
expect(response).not_to be_successful
end
diff --git a/spec/lib/gitlab/file_finder_spec.rb b/spec/lib/gitlab/file_finder_spec.rb
index 36fb4c48fb2..8d6df62b3f6 100644
--- a/spec/lib/gitlab/file_finder_spec.rb
+++ b/spec/lib/gitlab/file_finder_spec.rb
@@ -13,22 +13,44 @@ RSpec.describe Gitlab::FileFinder do
let(:expected_file_by_content) { 'CHANGELOG' }
end
- it 'filters by filename' do
- results = subject.find('files filename:wm.svg')
+ context 'with inclusive filters' do
+ it 'filters by filename' do
+ results = subject.find('files filename:wm.svg')
- expect(results.count).to eq(1)
- end
+ expect(results.count).to eq(1)
+ end
+
+ it 'filters by path' do
+ results = subject.find('white path:images')
- it 'filters by path' do
- results = subject.find('white path:images')
+ expect(results.count).to eq(1)
+ end
- expect(results.count).to eq(1)
+ it 'filters by extension' do
+ results = subject.find('files extension:md')
+
+ expect(results.count).to eq(4)
+ end
end
- it 'filters by extension' do
- results = subject.find('files extension:svg')
+ context 'with exclusive filters' do
+ it 'filters by filename' do
+ results = subject.find('files -filename:wm.svg')
+
+ expect(results.count).to eq(26)
+ end
+
+ it 'filters by path' do
+ results = subject.find('white -path:images')
+
+ expect(results.count).to eq(4)
+ end
+
+ it 'filters by extension' do
+ results = subject.find('files -extension:md')
- expect(results.count).to eq(1)
+ expect(results.count).to eq(23)
+ end
end
it 'does not cause N+1 query' do
diff --git a/spec/lib/gitlab/fogbugz_import/importer_spec.rb b/spec/lib/gitlab/fogbugz_import/importer_spec.rb
index d2be3e3f6b1..eb0c4da6ce3 100644
--- a/spec/lib/gitlab/fogbugz_import/importer_spec.rb
+++ b/spec/lib/gitlab/fogbugz_import/importer_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::FogbugzImport::Importer do
path: 'vim',
raw_data: '')
end
+
let(:import_data) { { 'repo' => repo } }
let(:credentials) do
{
diff --git a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
index 1c9004262c5..f4875aa0ebc 100644
--- a/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
+++ b/spec/lib/gitlab/gfm/reference_rewriter_spec.rb
@@ -110,6 +110,20 @@ RSpec.describe Gitlab::Gfm::ReferenceRewriter do
end
end
+ context 'when description contains a local reference' do
+ let(:local_issue) { create(:issue, project: old_project) }
+ let(:text) { "See ##{local_issue.iid}" }
+
+ it { is_expected.to eq("See #{old_project.path}##{local_issue.iid}") }
+ end
+
+ context 'when description contains a cross reference' do
+ let(:merge_request) { create(:merge_request) }
+ let(:text) { "See #{merge_request.project.full_path}!#{merge_request.iid}" }
+
+ it { is_expected.to eq(text) }
+ end
+
context 'with a commit' do
let(:old_project) { create(:project, :repository, name: 'old-project', group: group) }
let(:commit) { old_project.commit }
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index bac1b4c57f9..e1bcf4aeeb1 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -84,6 +84,7 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
parents: parents
}
end
+
let(:stale_sha) { Timecop.freeze(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
let(:active_sha) { Timecop.freeze(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
let(:future_sha) { Timecop.freeze(100.days.since) { create_commit } }
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 666b49f27f7..491437856d4 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
let(:rugged_repo) do
Rugged::Repository.new(File.join(TestEnv.repos_path, TEST_REPO_PATH))
end
+
let(:commit) { described_class.find(repository, SeedRepo::Commit::ID) }
let(:rugged_commit) { rugged_repo.lookup(SeedRepo::Commit::ID) }
diff --git a/spec/lib/gitlab/git/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index 67bd48256ce..02b00f711b4 100644
--- a/spec/lib/gitlab/git/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -89,12 +89,15 @@ RSpec.describe Gitlab::Git::Conflict::Parser do
let(:lines) do
described_class.parse(text, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
end
+
let(:old_line_numbers) do
lines.select { |line| line[:type] != 'new' }.map { |line| line[:line_old] }
end
+
let(:new_line_numbers) do
lines.select { |line| line[:type] != 'old' }.map { |line| line[:line_new] }
end
+
let(:line_indexes) { lines.map { |line| line[:line_obj_index] } }
it 'sets our lines as new lines' do
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 6da07ce84a1..b202015464f 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
return enum_for(:each) unless block_given?
loop do
- break if @count.zero?
+ break if @count == 0
# It is critical to decrement before yielding. We may never reach the lines after 'yield'.
@count -= 1
diff --git a/spec/lib/gitlab/git/diff_spec.rb b/spec/lib/gitlab/git/diff_spec.rb
index 91688c31f5e..117c519e98d 100644
--- a/spec/lib/gitlab/git/diff_spec.rb
+++ b/spec/lib/gitlab/git/diff_spec.rb
@@ -78,6 +78,7 @@ EOT
patch: raw_patch
)
end
+
let(:diff) { described_class.new(gitaly_diff) }
context 'with a small diff' do
@@ -134,6 +135,7 @@ EOT
to_id: '8e5177d718c561d36efde08bad36b43687ee6bf0'
)
end
+
let(:diff) { described_class.new(commit_delta) }
it 'initializes the diff' do
diff --git a/spec/lib/gitlab/git/patches/collection_spec.rb b/spec/lib/gitlab/git/patches/collection_spec.rb
index eb92f4663c8..67a502242ea 100644
--- a/spec/lib/gitlab/git/patches/collection_spec.rb
+++ b/spec/lib/gitlab/git/patches/collection_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Git::Patches::Collection do
let(:patch_content1) do
File.read(File.join(patches_folder, "0001-This-does-not-apply-to-the-feature-branch.patch"))
end
+
let(:patch_content2) do
File.read(File.join(patches_folder, "0001-A-commit-from-a-patch.patch"))
end
diff --git a/spec/lib/gitlab/git/patches/commit_patches_spec.rb b/spec/lib/gitlab/git/patches/commit_patches_spec.rb
index cd1e03a6de0..9ab0893eb55 100644
--- a/spec/lib/gitlab/git/patches/commit_patches_spec.rb
+++ b/spec/lib/gitlab/git/patches/commit_patches_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::Git::Patches::CommitPatches do
Gitlab::Git::Patches::Collection.new([content_1, content_2])
end
+
let(:user) { build(:user) }
let(:branch_name) { 'branch-with-patches' }
let(:repository) { create(:project, :repository).repository }
diff --git a/spec/lib/gitlab/git/patches/patch_spec.rb b/spec/lib/gitlab/git/patches/patch_spec.rb
index 629f43d3636..6588b18d0ae 100644
--- a/spec/lib/gitlab/git/patches/patch_spec.rb
+++ b/spec/lib/gitlab/git/patches/patch_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Git::Patches::Patch do
let(:patch_content) do
File.read(File.join(patches_folder, "0001-This-does-not-apply-to-the-feature-branch.patch"))
end
+
let(:patch) { described_class.new(patch_content) }
describe '#size' do
diff --git a/spec/lib/gitlab/git/pre_receive_error_spec.rb b/spec/lib/gitlab/git/pre_receive_error_spec.rb
index bf4530c8945..2ad27361c80 100644
--- a/spec/lib/gitlab/git/pre_receive_error_spec.rb
+++ b/spec/lib/gitlab/git/pre_receive_error_spec.rb
@@ -6,15 +6,27 @@ RSpec.describe Gitlab::Git::PreReceiveError do
Gitlab::Git::PreReceiveError::SAFE_MESSAGE_PREFIXES.each do |prefix|
context "error messages prefixed with #{prefix}" do
it 'accepts only errors lines with the prefix' do
- ex = described_class.new("#{prefix} Hello,\nworld!")
+ raw_message = "#{prefix} Hello,\nworld!"
+ ex = described_class.new(raw_message)
expect(ex.message).to eq('Hello,')
+ expect(ex.raw_message).to eq(raw_message)
end
it 'makes its message HTML-friendly' do
- ex = described_class.new("#{prefix} Hello,\n#{prefix} world!\n")
+ raw_message = "#{prefix} Hello,\n#{prefix} world!\n"
+ ex = described_class.new(raw_message)
expect(ex.message).to eq('Hello,<br>world!')
+ expect(ex.raw_message).to eq(raw_message)
+ end
+
+ it 'sanitizes the user message' do
+ raw_message = 'Raw message'
+ ex = described_class.new(raw_message, "#{prefix} User message")
+
+ expect(ex.raw_message).to eq(raw_message)
+ expect(ex.message).to eq('User message')
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index e7f4573c95f..73eecd3401a 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -579,9 +579,11 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:commit_with_old_name) do
Gitlab::Git::Commit.find(repository, @commit_with_old_name_id)
end
+
let(:commit_with_new_name) do
Gitlab::Git::Commit.find(repository, @commit_with_new_name_id)
end
+
let(:rename_commit) do
Gitlab::Git::Commit.find(repository, @rename_commit_id)
end
@@ -2178,6 +2180,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:new_repository) do
Gitlab::Git::Repository.new('test_second_storage', TEST_REPO_PATH, '', 'group/project')
end
+
let(:new_repository_path) { File.join(TestEnv::SECOND_STORAGE_PATH, new_repository.relative_path) }
subject { new_repository.replicate(repository) }
diff --git a/spec/lib/gitlab/git_access_project_spec.rb b/spec/lib/gitlab/git_access_project_spec.rb
index 520300363c9..f80915b2be9 100644
--- a/spec/lib/gitlab/git_access_project_spec.rb
+++ b/spec/lib/gitlab/git_access_project_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccessProject do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository) }
+ let(:container) { project }
let(:actor) { user }
let(:project_path) { project.path }
let(:namespace_path) { project&.namespace&.path }
@@ -13,19 +14,32 @@ RSpec.describe Gitlab::GitAccessProject do
let(:changes) { Gitlab::GitAccess::ANY }
let(:push_access_check) { access.check('git-receive-pack', changes) }
let(:pull_access_check) { access.check('git-upload-pack', changes) }
+ let(:access) do
+ described_class.new(actor, container, protocol,
+ authentication_abilities: authentication_abilities,
+ repository_path: project_path, namespace_path: namespace_path)
+ end
+
+ describe '#check_namespace!' do
+ context 'when namespace is nil' do
+ let(:namespace_path) { nil }
+
+ it 'does not allow push and pull access' do
+ aggregate_failures do
+ expect { push_access_check }.to raise_namespace_not_found
+ expect { pull_access_check }.to raise_namespace_not_found
+ end
+ end
+ end
+ end
describe '#check_project_accessibility!' do
context 'when the project is nil' do
- let(:project) { nil }
+ let(:container) { nil }
let(:project_path) { "new-project" }
context 'when user is allowed to create project in namespace' do
let(:namespace_path) { user.namespace.path }
- let(:access) do
- described_class.new(actor, nil,
- protocol, authentication_abilities: authentication_abilities,
- repository_path: project_path, namespace_path: namespace_path)
- end
it 'blocks pull access with "not found"' do
expect { pull_access_check }.to raise_not_found
@@ -39,11 +53,6 @@ RSpec.describe Gitlab::GitAccessProject do
context 'when user is not allowed to create project in namespace' do
let(:user2) { create(:user) }
let(:namespace_path) { user2.namespace.path }
- let(:access) do
- described_class.new(actor, nil,
- protocol, authentication_abilities: authentication_abilities,
- repository_path: project_path, namespace_path: namespace_path)
- end
it 'blocks push and pull with "not found"' do
aggregate_failures do
@@ -56,22 +65,27 @@ RSpec.describe Gitlab::GitAccessProject do
end
describe '#ensure_project_on_push!' do
- let(:access) do
- described_class.new(actor, project,
- protocol, authentication_abilities: authentication_abilities,
- repository_path: project_path, namespace_path: namespace_path)
- end
-
before do
allow(access).to receive(:changes).and_return(changes)
end
+ shared_examples 'no project is created' do
+ let(:raise_specific_error) { raise_not_found }
+ let(:action) { push_access_check }
+
+ it 'does not create a new project' do
+ expect { action }
+ .to raise_specific_error
+ .and change { Project.count }.by(0)
+ end
+ end
+
context 'when push' do
let(:cmd) { 'git-receive-pack' }
context 'when project does not exist' do
let(:project_path) { "nonexistent" }
- let(:project) { nil }
+ let(:container) { nil }
context 'when changes is _any' do
let(:changes) { Gitlab::GitAccess::ANY }
@@ -82,8 +96,8 @@ RSpec.describe Gitlab::GitAccessProject do
context 'when user can create project in namespace' do
let(:namespace_path) { user.namespace.path }
- it 'creates a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }
+ it 'creates a new project in the correct namespace' do
+ expect { push_access_check }
.to change { Project.count }.by(1)
.and change { Project.where(namespace: user.namespace, name: project_path).count }.by(1)
end
@@ -93,9 +107,7 @@ RSpec.describe Gitlab::GitAccessProject do
let(:user2) { create(:user) }
let(:namespace_path) { user2.namespace.path }
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
- end
+ it_behaves_like 'no project is created'
end
end
@@ -105,8 +117,8 @@ RSpec.describe Gitlab::GitAccessProject do
context 'when user can create project in namespace' do
let(:namespace_path) { user.namespace.path }
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ it_behaves_like 'no project is created' do
+ let(:raise_specific_error) { raise_forbidden }
end
end
end
@@ -115,32 +127,26 @@ RSpec.describe Gitlab::GitAccessProject do
context 'when check contains actual changes' do
let(:changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
- end
+ it_behaves_like 'no project is created'
end
end
context 'when project exists' do
let(:changes) { Gitlab::GitAccess::ANY }
- let!(:project) { create(:project) }
+ let!(:container) { project }
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
- end
+ it_behaves_like 'no project is created'
end
context 'when deploy key is used' do
let(:key) { create(:deploy_key, user: user) }
let(:actor) { key }
let(:project_path) { "nonexistent" }
- let(:project) { nil }
+ let(:container) { nil }
let(:namespace_path) { user.namespace.path }
let(:changes) { Gitlab::GitAccess::ANY }
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
- end
+ it_behaves_like 'no project is created'
end
end
@@ -151,10 +157,10 @@ RSpec.describe Gitlab::GitAccessProject do
context 'when project does not exist' do
let(:project_path) { "new-project" }
let(:namespace_path) { user.namespace.path }
- let(:project) { nil }
+ let(:container) { nil }
- it 'does not create a new project' do
- expect { access.send(:ensure_project_on_push!, cmd) }.not_to change { Project.count }
+ it_behaves_like 'no project is created' do
+ let(:action) { pull_access_check }
end
end
end
@@ -163,4 +169,12 @@ RSpec.describe Gitlab::GitAccessProject do
def raise_not_found
raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:project_not_found])
end
+
+ def raise_forbidden
+ raise_error(Gitlab::GitAccess::ForbiddenError)
+ end
+
+ def raise_namespace_not_found
+ raise_error(Gitlab::GitAccess::NotFoundError, described_class::ERROR_MESSAGES[:namespace_not_found])
+ end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 01691f87092..8153886a2ab 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -20,6 +20,18 @@ RSpec.describe Gitlab::GitAccess do
let(:push_access_check) { access.check('git-receive-pack', changes) }
let(:pull_access_check) { access.check('git-upload-pack', changes) }
+ let(:access_class) do
+ Class.new(described_class) do
+ def push_ability
+ :push_code
+ end
+
+ def download_ability
+ :download_code
+ end
+ end
+ end
+
describe '#check with single protocols allowed' do
def disable_protocol(protocol)
allow(Gitlab::ProtocolAccess).to receive(:allowed?).with(protocol).and_return(false)
@@ -58,7 +70,7 @@ RSpec.describe Gitlab::GitAccess do
it "doesn't block http pull" do
aggregate_failures do
- expect { pull_access_check }.not_to raise_forbidden('Git access over HTTP is not allowed')
+ expect { pull_access_check }.not_to raise_error
end
end
@@ -67,7 +79,7 @@ RSpec.describe Gitlab::GitAccess do
it "doesn't block http pull" do
aggregate_failures do
- expect { pull_access_check }.not_to raise_forbidden('Git access over HTTP is not allowed')
+ expect { pull_access_check }.not_to raise_error
end
end
end
@@ -75,33 +87,6 @@ RSpec.describe Gitlab::GitAccess do
end
end
- describe '#check_namespace!' do
- context 'when namespace exists' do
- before do
- project.add_maintainer(user)
- end
-
- it 'allows push and pull access' do
- aggregate_failures do
- expect { push_access_check }.not_to raise_error
- expect { pull_access_check }.not_to raise_error
- end
- end
- end
-
- context 'when namespace and project are nil' do
- let(:project) { nil }
- let(:namespace_path) { nil }
-
- it 'does not allow push and pull access' do
- aggregate_failures do
- expect { push_access_check }.to raise_namespace_not_found
- expect { pull_access_check }.to raise_namespace_not_found
- end
- end
- end
- end
-
describe '#check_project_accessibility!' do
context 'when the project exists' do
context 'when actor exists' do
@@ -464,7 +449,7 @@ RSpec.describe Gitlab::GitAccess do
let(:public_project) { create(:project, :public, :repository) }
let(:project_path) { public_project.path }
let(:namespace_path) { public_project.namespace.path }
- let(:access) { described_class.new(nil, public_project, 'web', authentication_abilities: [:download_code], repository_path: project_path, namespace_path: namespace_path) }
+ let(:access) { access_class.new(nil, public_project, 'web', authentication_abilities: [:download_code], repository_path: project_path, namespace_path: namespace_path) }
context 'when repository is enabled' do
it 'give access to download code' do
@@ -859,7 +844,7 @@ RSpec.describe Gitlab::GitAccess do
message = "Push operation timed out\n\nTiming information for debugging purposes:\nRunning checks for ref: wow"
expect_next_instance_of(Gitlab::Checks::ChangeAccess) do |check|
- expect(check).to receive(:exec).and_raise(Gitlab::Checks::TimedLogger::TimeoutError)
+ expect(check).to receive(:validate!).and_raise(Gitlab::Checks::TimedLogger::TimeoutError)
end
expect { access.check('git-receive-pack', changes) }.to raise_error(described_class::TimeoutError, message)
@@ -1067,7 +1052,7 @@ RSpec.describe Gitlab::GitAccess do
private
def access
- described_class.new(actor, project, protocol,
+ access_class.new(actor, project, protocol,
authentication_abilities: authentication_abilities,
namespace_path: namespace_path, repository_path: project_path,
redirected_path: redirected_path, auth_result_type: auth_result_type)
@@ -1078,15 +1063,11 @@ RSpec.describe Gitlab::GitAccess do
end
def raise_forbidden(message)
- raise_error(Gitlab::GitAccess::ForbiddenError, message)
+ raise_error(described_class::ForbiddenError, message)
end
def raise_not_found
- raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:project_not_found])
- end
-
- def raise_namespace_not_found
- raise_error(Gitlab::GitAccess::NotFoundError, Gitlab::GitAccess::ERROR_MESSAGES[:namespace_not_found])
+ raise_error(described_class::NotFoundError, described_class::ERROR_MESSAGES[:project_not_found])
end
def build_authentication_abilities
diff --git a/spec/lib/gitlab/git_access_wiki_spec.rb b/spec/lib/gitlab/git_access_wiki_spec.rb
index 738269e4a14..688089f4862 100644
--- a/spec/lib/gitlab/git_access_wiki_spec.rb
+++ b/spec/lib/gitlab/git_access_wiki_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Gitlab::GitAccessWiki do
let(:access) { described_class.new(user, project, 'web', authentication_abilities: authentication_abilities, redirected_path: redirected_path) }
- let(:project) { create(:project, :wiki_repo) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :wiki_repo) }
+ let_it_be(:user) { create(:user) }
let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
let(:redirected_path) { nil }
let(:authentication_abilities) do
@@ -17,56 +17,65 @@ RSpec.describe Gitlab::GitAccessWiki do
end
describe '#push_access_check' do
+ subject { access.check('git-receive-pack', changes) }
+
context 'when user can :create_wiki' do
before do
- create(:protected_branch, name: 'master', project: project)
project.add_developer(user)
end
- subject { access.check('git-receive-pack', changes) }
-
it { expect { subject }.not_to raise_error }
context 'when in a read-only GitLab instance' do
+ let(:message) { "You can't push code to a read-only GitLab instance." }
+
before do
allow(Gitlab::Database).to receive(:read_only?) { true }
end
- it 'does not give access to upload wiki code' do
- expect { subject }.to raise_error(Gitlab::GitAccess::ForbiddenError, "You can't push code to a read-only GitLab instance.")
- end
+ it_behaves_like 'forbidden git access'
+ end
+ end
+
+ context 'the user cannot :create_wiki' do
+ it_behaves_like 'not-found git access' do
+ let(:message) { 'The wiki you were looking for could not be found.' }
end
end
end
- describe '#access_check_download!' do
+ describe '#check_download_access!' do
subject { access.check('git-upload-pack', Gitlab::GitAccess::ANY) }
- before do
- project.add_developer(user)
- end
-
- context 'when wiki feature is enabled' do
- it 'give access to download wiki code' do
- expect { subject }.not_to raise_error
+ context 'the user can :download_wiki_code' do
+ before do
+ project.add_developer(user)
end
- context 'when the wiki repository does not exist' do
- let(:project) { create(:project) }
+ context 'when wiki feature is disabled' do
+ before do
+ project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
+ end
- it 'returns not found' do
- expect(project.wiki_repository_exists?).to eq(false)
+ it_behaves_like 'forbidden git access' do
+ let(:message) { include('wiki') }
+ end
+ end
- expect { subject }.to raise_error(Gitlab::GitAccess::NotFoundError, 'A repository for this project does not exist yet.')
+ context 'when the repository does not exist' do
+ before do
+ allow(project.wiki).to receive(:repository).and_return(double('Repository', exists?: false))
+ end
+
+ it_behaves_like 'not-found git access' do
+ let(:message) { include('for this wiki') }
end
end
end
- context 'when wiki feature is disabled' do
- it 'does not give access to download wiki code' do
- project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
-
- expect { subject }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You are not allowed to download code from this project.')
+ context 'the user cannot :download_wiki_code' do
+ it_behaves_like 'not-found git access' do
+ let(:message) { include('wiki') }
end
end
end
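
For readers unfamiliar with the shared examples the wiki spec now leans on ('forbidden git access' and 'not-found git access'), they are assumed to live in a spec support file and simply assert on the raised error class plus the `message` let supplied by the including context. A minimal sketch, not the actual support file:

RSpec.shared_examples 'forbidden git access' do
  # The including context defines `subject` (the access check) and `message`.
  it 'raises a ForbiddenError with the expected message' do
    expect { subject }.to raise_error(Gitlab::GitAccess::ForbiddenError, message)
  end
end

RSpec.shared_examples 'not-found git access' do
  it 'raises a NotFoundError with the expected message' do
    expect { subject }.to raise_error(Gitlab::GitAccess::NotFoundError, message)
  end
end
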
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index 45a25ccfc88..9581b017839 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -266,6 +266,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
repository: repository_message, revision: revision
)
end
+
let(:response) do
Gitaly::CommitStatsResponse.new(
oid: revision,
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
index b016e8bdf5a..e90cb966917 100644
--- a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe Gitlab::GitalyClient::ConflictsService do
let(:files) do
[{ old_path: 'some/path', new_path: 'some/path', content: '' }]
end
+
let(:source_branch) { 'master' }
let(:target_branch) { 'feature' }
let(:commit_message) { 'Solving conflicts\n\nTést' }
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 4e16f760235..b974f456914 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -20,11 +20,13 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
user: gitaly_user
)
end
+
let(:gitaly_commit) { build(:gitaly_commit) }
let(:commit_id) { gitaly_commit.id }
let(:gitaly_branch) do
Gitaly::Branch.new(name: branch_name, target_commit: gitaly_commit)
end
+
let(:response) { Gitaly::UserCreateBranchResponse.new(branch: gitaly_branch) }
let(:commit) { Gitlab::Git::Commit.new(repository, gitaly_commit) }
@@ -68,6 +70,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
user: gitaly_user
)
end
+
let(:response) { Gitaly::UserUpdateBranchResponse.new }
subject { client.user_update_branch(branch_name, user, newrev, oldrev) }
@@ -123,6 +126,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
user: gitaly_user
)
end
+
let(:response) { Gitaly::UserDeleteBranchResponse.new }
subject { client.user_delete_branch(branch_name, user) }
@@ -162,6 +166,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
user: gitaly_user
)
end
+
let(:branch_update) do
Gitaly::OperationBranchUpdate.new(
commit_id: source_sha,
@@ -169,6 +174,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
branch_created: false
)
end
+
let(:response) { Gitaly::UserFFBranchResponse.new(branch_update: branch_update) }
before do
@@ -303,6 +309,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
commit_message: commit_message
)
end
+
let(:squash_sha) { 'f00' }
let(:response) { Gitaly::UserSquashResponse.new(squash_sha: squash_sha) }
@@ -375,6 +382,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
let(:patch_content) do
patch_names.map { |name| File.read(File.join(patches_folder, name)) }.join("\n")
end
+
let(:patch_names) { %w(0001-This-does-not-apply-to-the-feature-branch.patch) }
let(:branch_name) { 'branch-with-patches' }
diff --git a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
index a06f8459963..8a169acb69c 100644
--- a/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/wiki_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::GitalyClient::WikiService do
Gitaly::WikiFindPageResponse.new(page: Gitaly::WikiPage.new(raw_data: 'b'))
]
end
+
let(:wiki_page) { subject.first }
let(:wiki_page_version) { subject.last }
@@ -60,6 +61,7 @@ RSpec.describe Gitlab::GitalyClient::WikiService do
Gitaly::WikiGetAllPagesResponse.new(end_of_page: true)
]
end
+
let(:wiki_page_1) { subject[0].first }
let(:wiki_page_1_version) { subject[0].last }
let(:wiki_page_2) { subject[1].first }
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 7cff6ed1388..5f6ab42d0d2 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -169,7 +169,7 @@ RSpec.describe Gitlab::GithubImport::Client do
expect(client).to receive(:raise_or_wait_for_rate_limit)
client.with_rate_limit do
- if retries.zero?
+ if retries == 0
retries += 1
raise(Octokit::TooManyRequests)
end
diff --git a/spec/lib/gitlab/gitlab_import/project_creator_spec.rb b/spec/lib/gitlab/gitlab_import/project_creator_spec.rb
index 44bcfb93c51..53bf1db3438 100644
--- a/spec/lib/gitlab/gitlab_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/gitlab_import/project_creator_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::GitlabImport::ProjectCreator do
owner: { name: "john" }
}.with_indifferent_access
end
+
let(:namespace) { create(:group) }
let(:token) { "asdffg" }
let(:access_params) { { gitlab_access_token: token } }
diff --git a/spec/lib/gitlab/google_code_import/importer_spec.rb b/spec/lib/gitlab/google_code_import/importer_spec.rb
index f681e3c9f31..a22e80ae1c0 100644
--- a/spec/lib/gitlab/google_code_import/importer_spec.rb
+++ b/spec/lib/gitlab/google_code_import/importer_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::GoogleCodeImport::Importer do
'user_map' => { 'thilo...' => "@#{mapped_user.username}" }
}
end
+
let(:project) { create(:project) }
subject { described_class.new(project) }
diff --git a/spec/lib/gitlab/google_code_import/project_creator_spec.rb b/spec/lib/gitlab/google_code_import/project_creator_spec.rb
index 4be2e16c116..cfebe57aed3 100644
--- a/spec/lib/gitlab/google_code_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/google_code_import/project_creator_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Gitlab::GoogleCodeImport::ProjectCreator do
"repositoryUrls" => ["https://vim.googlecode.com/git/"]
)
end
+
let(:namespace) { create(:group) }
before do
diff --git a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
index e5d0adcfd5f..91299de0751 100644
--- a/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
+++ b/spec/lib/gitlab/grape_logging/formatters/lograge_with_timestamp_spec.rb
@@ -29,6 +29,7 @@ RSpec.describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do
correlation_id: 'WMefXn60429'
}
end
+
let(:time) { Time.now }
let(:result) { Gitlab::Json.parse(subject) }
diff --git a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
index bcb1f6c5af7..3ce09740ec8 100644
--- a/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
+++ b/spec/lib/gitlab/grape_logging/loggers/exception_logger_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::ExceptionLogger do
before do
current_backtrace = caller
allow(exception).to receive(:backtrace).and_return(current_backtrace)
- expected['exception.backtrace'] = Gitlab::BacktraceCleaner.clean_backtrace(current_backtrace)
+ expected['exception.backtrace'] = Rails.backtrace_cleaner.clean(current_backtrace)
end
it 'includes the backtrace' do
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
index 83873081a98..c5d7665c3b2 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
@@ -116,6 +116,7 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
end
end
+
let(:error) { /#{fake_class.name} has no authorizations/ }
describe '#authorized_find!' do
diff --git a/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb b/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
index 68b24a60a99..1b9301cd1aa 100644
--- a/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
+++ b/spec/lib/gitlab/graphql/find_argument_in_parent_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Gitlab::Graphql::FindArgumentInParent do
)
)
end
+
let(:arg_name) { :my_arg }
it 'searches parents and returns the argument' do
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 65698caac34..09d7e084172 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -185,6 +185,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) do
Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :asc).order(id: :asc)
end
+
let(:ascending_nodes) { [project5, project1, project3, project2, project4] }
it_behaves_like 'nodes are in ascending order'
@@ -210,6 +211,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) do
Project.order(Arel.sql('projects.last_repository_check_at IS NULL')).order(last_repository_check_at: :desc).order(id: :asc)
end
+
let(:descending_nodes) { [project3, project1, project5, project2, project4] }
it_behaves_like 'nodes are in descending order'
@@ -243,6 +245,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) do
Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(id: :asc)
end
+
let(:ascending_nodes) { [project1, project5, project3, project2, project4] }
it_behaves_like 'nodes are in ascending order'
@@ -252,6 +255,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
let(:nodes) do
Project.order(Arel::Table.new(:projects)['name'].lower.desc).order(id: :desc)
end
+
let(:descending_nodes) { [project4, project2, project3, project5, project1] }
it_behaves_like 'nodes are in descending order'
diff --git a/spec/lib/gitlab/hashed_path_spec.rb b/spec/lib/gitlab/hashed_path_spec.rb
new file mode 100644
index 00000000000..051c5196748
--- /dev/null
+++ b/spec/lib/gitlab/hashed_path_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::HashedPath do
+ let(:root_hash) { 1 }
+ let(:hashed_path) { described_class.new(*path, root_hash: root_hash) }
+
+ describe '#to_s' do
+ subject { hashed_path }
+
+ context 'when path contains a single value' do
+ let(:path) { 'path' }
+
+ it 'returns the disk path' do
+ expect(subject).to match(%r[\h{2}/\h{2}/\h{64}/path])
+ end
+ end
+
+ context 'when path contains multiple values' do
+ let(:path) { %w(path1 path2) }
+
+ it 'returns the disk path' do
+ expect(subject).to match(%r[\h{2}/\h{2}/\h{64}/path1/path2])
+ end
+ end
+ end
+end
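
The new spec pins down only the shape of the generated path: two 2-character prefixes, a 64-character hex digest, then the path segments. A plausible implementation consistent with those expectations, assuming the digest is a SHA256 of the root hash as in GitLab's hashed storage, could look like this (illustrative only, not the real class):

require 'digest'

# Sketch of a hashed path builder matching %r[\h{2}/\h{2}/\h{64}/...].
class HashedPathSketch
  def initialize(*paths, root_hash:)
    @paths = paths
    @root_hash = root_hash
  end

  def to_s
    digest = Digest::SHA2.hexdigest(@root_hash.to_s)

    File.join(digest[0..1], digest[2..3], digest, *@paths.map(&:to_s))
  end
end

HashedPathSketch.new('path1', 'path2', root_hash: 1).to_s
# => "6b/86/6b86b273.../path1/path2"
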
diff --git a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
index 50f3a4776be..f5ee8eba8bc 100644
--- a/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issuable_builder_spec.rb
@@ -56,6 +56,7 @@ RSpec.describe Gitlab::HookData::IssuableBuilder do
]
}
end
+
let(:data) { builder.build(user: user, changes: changes) }
it 'populates the :changes hash' do
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 09da94e7559..5c990eb3248 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Gitlab::HTTP do
include StubRequests
+ let(:default_options) { described_class::DEFAULT_TIMEOUT_OPTIONS }
+
context 'when allow_local_requests' do
it 'sends the request to the correct URI' do
stub_full_request('https://example.org:8080', ip_address: '8.8.8.8').to_return(status: 200)
@@ -101,6 +103,73 @@ RSpec.describe Gitlab::HTTP do
end
end
+ describe 'setting default timeouts' do
+ before do
+ stub_full_request('http://example.org', method: :any)
+ end
+
+ context 'when no timeouts are set' do
+ it 'sets default open and read and write timeouts' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', default_options
+ ).and_call_original
+
+ described_class.get('http://example.org')
+ end
+ end
+
+ context 'when :timeout is set' do
+ it 'does not set any default timeouts' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', timeout: 1
+ ).and_call_original
+
+ described_class.get('http://example.org', timeout: 1)
+ end
+ end
+
+ context 'when :open_timeout is set' do
+ it 'only sets default read and write timeout' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', default_options.merge(open_timeout: 1)
+ ).and_call_original
+
+ described_class.get('http://example.org', open_timeout: 1)
+ end
+ end
+
+ context 'when :read_timeout is set' do
+ it 'only sets default open and write timeout' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', default_options.merge(read_timeout: 1)
+ ).and_call_original
+
+ described_class.get('http://example.org', read_timeout: 1)
+ end
+ end
+
+ context 'when :write_timeout is set' do
+ it 'only sets default open and read timeout' do
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Put, 'http://example.org', default_options.merge(write_timeout: 1)
+ ).and_call_original
+
+ described_class.put('http://example.org', write_timeout: 1)
+ end
+ end
+
+ context 'when default timeouts feature is disabled' do
+ it 'does not apply any defaults' do
+ stub_feature_flags(http_default_timeouts: false)
+ expect(described_class).to receive(:httparty_perform_request).with(
+ Net::HTTP::Get, 'http://example.org', open_timeout: 1
+ ).and_call_original
+
+ described_class.get('http://example.org', open_timeout: 1)
+ end
+ end
+ end
+
describe '.try_get' do
let(:path) { 'http://example.org' }
@@ -111,10 +180,10 @@ RSpec.describe Gitlab::HTTP do
end
let(:request_options) do
- {
+ default_options.merge({
verify: false,
basic_auth: { username: 'user', password: 'pass' }
- }
+ })
end
described_class::HTTP_ERRORS.each do |exception_class|
@@ -123,8 +192,8 @@ RSpec.describe Gitlab::HTTP do
context 'with path' do
before do
- expect(described_class).to receive(:get)
- .with(path, {})
+ expect(described_class).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, path, default_options)
.and_raise(klass)
end
@@ -155,8 +224,8 @@ RSpec.describe Gitlab::HTTP do
context 'with path and options' do
before do
- expect(described_class).to receive(:get)
- .with(path, request_options)
+ expect(described_class).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, path, request_options)
.and_raise(klass)
end
@@ -191,8 +260,8 @@ RSpec.describe Gitlab::HTTP do
end
before do
- expect(described_class).to receive(:get)
- .with(path, request_options, &block)
+ expect(described_class).to receive(:httparty_perform_request)
+ .with(Net::HTTP::Get, path, request_options, &block)
.and_raise(klass)
end
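
The timeout examples above imply a simple precedence: a caller-supplied :timeout suppresses all defaults, individual open/read/write timeouts override only their own default, and the http_default_timeouts feature flag disables the merge entirely. A sketch of that merge logic (the default values shown are assumptions, not taken from the diff):

DEFAULT_TIMEOUT_OPTIONS = {
  open_timeout: 10,
  read_timeout: 20,
  write_timeout: 30
}.freeze

def with_default_timeouts(options, feature_enabled: true)
  return options unless feature_enabled      # http_default_timeouts disabled
  return options if options.key?(:timeout)   # a blanket :timeout wins outright

  DEFAULT_TIMEOUT_OPTIONS.merge(options)     # per-phase values override their own default
end

with_default_timeouts({ read_timeout: 1 })
# => { open_timeout: 10, read_timeout: 1, write_timeout: 30 }
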
diff --git a/spec/lib/gitlab/i18n/po_linter_spec.rb b/spec/lib/gitlab/i18n/po_linter_spec.rb
index 5dfc9d3613c..cfa39d95ebd 100644
--- a/spec/lib/gitlab/i18n/po_linter_spec.rb
+++ b/spec/lib/gitlab/i18n/po_linter_spec.rb
@@ -6,7 +6,7 @@ require 'simple_po_parser'
# Disabling this cop to allow for multi-language examples in comments
# rubocop:disable Style/AsciiComments
RSpec.describe Gitlab::I18n::PoLinter do
- let(:linter) { described_class.new(po_path) }
+ let(:linter) { described_class.new(po_path: po_path, html_todolist: {}) }
let(:po_path) { 'spec/fixtures/valid.po' }
def fake_translation(msgid:, translation:, plural_id: nil, plurals: [])
@@ -23,8 +23,9 @@ RSpec.describe Gitlab::I18n::PoLinter do
end
Gitlab::I18n::TranslationEntry.new(
- data,
- plurals.size + 1
+ entry_data: data,
+ nplurals: plurals.size + 1,
+ html_allowed: nil
)
end
@@ -145,6 +146,67 @@ RSpec.describe Gitlab::I18n::PoLinter do
expect(errors[message_id]).to include(expected_error)
end
end
+
+ context 'when an entry contains html' do
+ let(:po_path) { 'spec/fixtures/potential_html.po' }
+
+ it 'presents an error for each component containing angle brackets' do
+ message_id = 'String with some <strong>emphasis</strong>'
+
+ expect(errors[message_id]).to match_array [
+ a_string_starting_with('contains < or >.'),
+ a_string_starting_with('plural id contains < or >.'),
+ a_string_starting_with('translation contains < or >.')
+ ]
+ end
+ end
+
+ context 'when an entry contains html on the todolist' do
+ subject(:linter) { described_class.new(po_path: po_path, html_todolist: todolist) }
+
+ let(:po_path) { 'spec/fixtures/potential_html.po' }
+ let(:todolist) do
+ {
+ 'String with a legitimate < use' => {
+ 'plural_id' => 'String with lots of < > uses',
+ 'translations' => [
+ 'Translated string with a legitimate < use',
+ 'Translated string with lots of < > uses'
+ ]
+ }
+ }
+ end
+
+ it 'does not present an error' do
+ message_id = 'String with a legitimate < use'
+
+ expect(errors[message_id]).to be_nil
+ end
+ end
+
+ context 'when an entry on the html todolist has changed' do
+ subject(:linter) { described_class.new(po_path: po_path, html_todolist: todolist) }
+
+ let(:po_path) { 'spec/fixtures/potential_html.po' }
+ let(:todolist) do
+ {
+ 'String with a legitimate < use' => {
+ 'plural_id' => 'String with lots of < > uses',
+ 'translations' => [
+ 'Translated string with a different legitimate < use',
+ 'Translated string with lots of < > uses'
+ ]
+ }
+ }
+ end
+
+ it 'presents an error for the changed component' do
+ message_id = 'String with a legitimate < use'
+
+ expect(errors[message_id])
+ .to include a_string_starting_with('translation contains < or >.')
+ end
+ end
end
describe '#parse_po' do
@@ -200,6 +262,7 @@ RSpec.describe Gitlab::I18n::PoLinter do
expect(linter).to receive(:validate_number_of_plurals).with([], fake_entry)
expect(linter).to receive(:validate_unescaped_chars).with([], fake_entry)
expect(linter).to receive(:validate_translation).with([], fake_entry)
+ expect(linter).to receive(:validate_html).with([], fake_entry)
linter.validate_entry(fake_entry)
end
@@ -212,8 +275,9 @@ RSpec.describe Gitlab::I18n::PoLinter do
allow(linter).to receive(:metadata_entry).and_return(fake_metadata)
fake_entry = Gitlab::I18n::TranslationEntry.new(
- { msgid: 'the singular', msgid_plural: 'the plural', 'msgstr[0]' => 'the singular' },
- 2
+ entry_data: { msgid: 'the singular', msgid_plural: 'the plural', 'msgstr[0]' => 'the singular' },
+ nplurals: 2,
+ html_allowed: nil
)
errors = []
diff --git a/spec/lib/gitlab/i18n/translation_entry_spec.rb b/spec/lib/gitlab/i18n/translation_entry_spec.rb
index 76879f75bec..2c95b0b0124 100644
--- a/spec/lib/gitlab/i18n/translation_entry_spec.rb
+++ b/spec/lib/gitlab/i18n/translation_entry_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#singular_translation' do
it 'returns the normal `msgstr` for translations without plural' do
data = { msgid: 'Hello world', msgstr: 'Bonjour monde' }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.singular_translation).to eq('Bonjour monde')
end
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
'msgstr[0]' => 'Bonjour monde',
'msgstr[1]' => 'Bonjour mondes'
}
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.singular_translation).to eq('Bonjour monde')
end
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#all_translations' do
it 'returns all translations for singular translations' do
data = { msgid: 'Hello world', msgstr: 'Bonjour monde' }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.all_translations).to eq(['Bonjour monde'])
end
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
'msgstr[0]' => 'Bonjour monde',
'msgstr[1]' => 'Bonjour mondes'
}
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.all_translations).to eq(['Bonjour monde', 'Bonjour mondes'])
end
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
msgid_plural: 'Hello worlds',
'msgstr[0]' => 'Bonjour monde'
}
- entry = described_class.new(data, 1)
+ entry = described_class.new(entry_data: data, nplurals: 1, html_allowed: nil)
expect(entry.plural_translations).to eq(['Bonjour monde'])
end
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
'msgstr[1]' => 'Bonjour mondes',
'msgstr[2]' => 'Bonjour tous les mondes'
}
- entry = described_class.new(data, 3)
+ entry = described_class.new(entry_data: data, nplurals: 3, html_allowed: nil)
expect(entry.plural_translations).to eq(['Bonjour mondes', 'Bonjour tous les mondes'])
end
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
msgid: 'hello world',
msgstr: 'hello'
}
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry).to have_singular_translation
end
@@ -89,7 +89,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
"msgstr[0]" => 'hello world',
"msgstr[1]" => 'hello worlds'
}
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry).to have_singular_translation
end
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
msgid_plural: 'hello worlds',
"msgstr[0]" => 'hello worlds'
}
- entry = described_class.new(data, 1)
+ entry = described_class.new(entry_data: data, nplurals: 1, html_allowed: nil)
expect(entry).not_to have_singular_translation
end
@@ -109,7 +109,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#msgid_contains_newlines' do
it 'is true when the msgid is an array' do
data = { msgid: %w(hello world) }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.msgid_has_multiple_lines?).to be_truthy
end
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#plural_id_contains_newlines' do
it 'is true when the msgid is an array' do
data = { msgid_plural: %w(hello world) }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.plural_id_has_multiple_lines?).to be_truthy
end
@@ -127,7 +127,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#translations_contain_newlines' do
it 'is true when the msgid is an array' do
data = { msgstr: %w(hello world) }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry.translations_have_multiple_lines?).to be_truthy
end
@@ -135,7 +135,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#contains_unescaped_chars' do
let(:data) { { msgid: '' } }
- let(:entry) { described_class.new(data, 2) }
+ let(:entry) { described_class.new(entry_data: data, nplurals: 2, html_allowed: nil) }
it 'is true when the msgid is an array' do
string = '「100%確定」'
@@ -177,7 +177,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#msgid_contains_unescaped_chars' do
it 'is true when the msgid contains a `%`' do
data = { msgid: '「100%確定」' }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry).to receive(:contains_unescaped_chars?).and_call_original
expect(entry.msgid_contains_unescaped_chars?).to be_truthy
@@ -187,7 +187,7 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#plural_id_contains_unescaped_chars' do
it 'is true when the plural msgid contains a `%`' do
data = { msgid_plural: '「100%確定」' }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry).to receive(:contains_unescaped_chars?).and_call_original
expect(entry.plural_id_contains_unescaped_chars?).to be_truthy
@@ -197,10 +197,144 @@ RSpec.describe Gitlab::I18n::TranslationEntry do
describe '#translations_contain_unescaped_chars' do
it 'is true when the translation contains a `%`' do
data = { msgstr: '「100%確定」' }
- entry = described_class.new(data, 2)
+ entry = described_class.new(entry_data: data, nplurals: 2, html_allowed: nil)
expect(entry).to receive(:contains_unescaped_chars?).and_call_original
expect(entry.translations_contain_unescaped_chars?).to be_truthy
end
end
+
+ describe '#msgid_contains_potential_html?' do
+ subject(:entry) { described_class.new(entry_data: data, nplurals: 2, html_allowed: nil) }
+
+ context 'when there are no angle brackets in the msgid' do
+ let(:data) { { msgid: 'String with no brackets' } }
+
+ it 'returns false' do
+ expect(entry.msgid_contains_potential_html?).to be_falsey
+ end
+ end
+
+ context 'when there are angle brackets in the msgid' do
+ let(:data) { { msgid: 'String with <strong> tag' } }
+
+ it 'returns true' do
+ expect(entry.msgid_contains_potential_html?).to be_truthy
+ end
+ end
+ end
+
+ describe '#plural_id_contains_potential_html?' do
+ subject(:entry) { described_class.new(entry_data: data, nplurals: 2, html_allowed: nil) }
+
+ context 'when there are no angle brackets in the plural_id' do
+ let(:data) { { msgid_plural: 'String with no brackets' } }
+
+ it 'returns false' do
+ expect(entry.plural_id_contains_potential_html?).to be_falsey
+ end
+ end
+
+ context 'when there are angle brackets in the plural_id' do
+ let(:data) { { msgid_plural: 'This string has a <strong>' } }
+
+ it 'returns true' do
+ expect(entry.plural_id_contains_potential_html?).to be_truthy
+ end
+ end
+ end
+
+ describe '#translations_contain_potential_html?' do
+ subject(:entry) { described_class.new(entry_data: data, nplurals: 2, html_allowed: nil) }
+
+ context 'when there are no angle brackets in the translations' do
+ let(:data) { { msgstr: 'This string has no angle brackets' } }
+
+ it 'returns false' do
+ expect(entry.translations_contain_potential_html?).to be_falsey
+ end
+ end
+
+ context 'when there are angle brackets in the translations' do
+ let(:data) { { msgstr: 'This string has a <strong>' } }
+
+ it 'returns true' do
+ expect(entry.translations_contain_potential_html?).to be_truthy
+ end
+ end
+ end
+
+ describe '#msgid_html_allowed?' do
+ subject(:entry) do
+ described_class.new(entry_data: { msgid: 'String with a <strong>' }, nplurals: 2, html_allowed: html_todo)
+ end
+
+ context 'when the html in the string is in the todolist' do
+ let(:html_todo) { { 'plural_id' => nil, 'translations' => [] } }
+
+ it 'returns true' do
+ expect(entry.msgid_html_allowed?).to be true
+ end
+ end
+
+ context 'when the html in the string is not in the todolist' do
+ let(:html_todo) { nil }
+
+ it 'returns false' do
+ expect(entry.msgid_html_allowed?).to be false
+ end
+ end
+ end
+
+ describe '#plural_id_html_allowed?' do
+ subject(:entry) do
+ described_class.new(entry_data: { msgid_plural: 'String with many <strong>' }, nplurals: 2, html_allowed: html_todo)
+ end
+
+ context 'when the html in the string is in the todolist' do
+ let(:html_todo) { { 'plural_id' => 'String with many <strong>', 'translations' => [] } }
+
+ it 'returns true' do
+ expect(entry.plural_id_html_allowed?).to be true
+ end
+ end
+
+ context 'when the html in the string is not in the todolist' do
+ let(:html_todo) { { 'plural_id' => 'String with some <strong>', 'translations' => [] } }
+
+ it 'returns false' do
+ expect(entry.plural_id_html_allowed?).to be false
+ end
+ end
+ end
+
+ describe '#translations_html_allowed?' do
+ subject(:entry) do
+ described_class.new(entry_data: { msgstr: 'String with a <strong>' }, nplurals: 2, html_allowed: html_todo)
+ end
+
+ context 'when the html in the string is in the todolist' do
+ let(:html_todo) { { 'plural_id' => nil, 'translations' => ['String with a <strong>'] } }
+
+ it 'returns true' do
+ expect(entry.translations_html_allowed?).to be true
+ end
+ end
+
+ context 'when the html in the string is not in the todolist' do
+ let(:html_todo) { { 'plural_id' => nil, 'translations' => ['String with a different <strong>'] } }
+
+ it 'returns false' do
+ expect(entry.translations_html_allowed?).to be false
+ end
+ end
+
+ context 'when the todolist only has the msgid' do
+ let(:html_todo) { { 'plural_id' => nil, 'translations' => nil } }
+
+ it 'returns false' do
+ expect(entry.translations_html_allowed?).to be false
+ end
+ end
+ end
end
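
Taken together, the new examples describe three predicates driven by an optional per-entry todolist hash: the msgid is allowed as soon as the entry appears on the list at all, the plural id must match the recorded 'plural_id', and every translation must appear verbatim under 'translations'. A condensed sketch of that logic (the real TranslationEntry carries far more behaviour):

class TranslationEntrySketch
  def initialize(entry_data:, nplurals:, html_allowed:)
    @entry_data = entry_data
    @nplurals = nplurals
    @html_allowed = html_allowed
  end

  def msgid_html_allowed?
    !@html_allowed.nil?
  end

  def plural_id_html_allowed?
    !@html_allowed.nil? && @html_allowed['plural_id'] == @entry_data[:msgid_plural]
  end

  def translations_html_allowed?
    allowed = @html_allowed && @html_allowed['translations']
    return false unless allowed

    all_translations.all? { |translation| allowed.include?(translation) }
  end

  private

  # Collects msgstr and msgstr[n] values regardless of key type.
  def all_translations
    @entry_data.select { |key, _| key.to_s.start_with?('msgstr') }.values.flatten
  end
end
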
diff --git a/spec/lib/gitlab/i18n_spec.rb b/spec/lib/gitlab/i18n_spec.rb
index 592adadc362..ee10739195a 100644
--- a/spec/lib/gitlab/i18n_spec.rb
+++ b/spec/lib/gitlab/i18n_spec.rb
@@ -5,6 +5,14 @@ require 'spec_helper'
RSpec.describe Gitlab::I18n do
let(:user) { create(:user, preferred_language: 'es') }
+ describe '.selectable_locales' do
+ it 'does not return languages that should not be available in the UI' do
+ Gitlab::I18n::NOT_AVAILABLE_IN_UI.each do |language|
+ expect(described_class.selectable_locales).not_to include(language)
+ end
+ end
+ end
+
describe '.locale=' do
after do
described_class.use_default_locale
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 02500778426..37b5d8a1021 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -12,6 +12,7 @@ issues:
- resource_weight_events
- resource_milestone_events
- resource_state_events
+- resource_iteration_events
- sent_notifications
- sentry_issue
- label_links
@@ -46,6 +47,8 @@ issues:
- system_note_metadata
- alert_management_alert
- status_page_published_incident
+- namespace
+- note_authors
events:
- author
- project
@@ -166,6 +169,7 @@ merge_requests:
- deployments
- user_mentions
- system_note_metadata
+- note_authors
external_pull_requests:
- project
merge_request_diff:
@@ -229,6 +233,7 @@ ci_pipelines:
- daily_report_results
- latest_builds_report_results
- messages
+- pipeline_artifacts
ci_refs:
- project
- ci_pipelines
@@ -312,6 +317,7 @@ project:
- chat_services
- cluster
- clusters
+- cluster_agents
- cluster_project
- creator
- cycle_analytics_stages
@@ -351,7 +357,6 @@ project:
- youtrack_service
- custom_issue_tracker_service
- bugzilla_service
-- gitlab_issue_tracker_service
- external_wiki_service
- mock_ci_service
- mock_deployment_service
@@ -465,6 +470,7 @@ project:
- vulnerability_identifiers
- vulnerability_scanners
- dast_site_profiles
+- dast_scanner_profiles
- dast_sites
- operations_feature_flags
- operations_feature_flags_client
@@ -515,6 +521,9 @@ project:
- webex_teams_service
- build_report_results
- vulnerability_statistic
+- vulnerability_historical_statistics
+- product_analytics_events
+- pipeline_artifacts
award_emoji:
- awardable
- user
@@ -665,6 +674,7 @@ epic:
- events
- resource_label_events
- user_mentions
+- note_authors
epic_issue:
- epic
- issue
diff --git a/spec/lib/gitlab/import_export/base/object_builder_spec.rb b/spec/lib/gitlab/import_export/base/object_builder_spec.rb
index d560c8ea5a7..38c3b23db36 100644
--- a/spec/lib/gitlab/import_export/base/object_builder_spec.rb
+++ b/spec/lib/gitlab/import_export/base/object_builder_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::ImportExport::Base::ObjectBuilder do
name: 'project',
path: 'project')
end
+
let(:klass) { Milestone }
let(:attributes) { { 'title' => 'Test Base::ObjectBuilder Milestone', 'project' => project } }
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index 6cc16ee9cbb..2eb983cc050 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -172,7 +172,7 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to eq(expected_visibilities)
+ expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
end
end
end
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index eb6b07ce02f..949cfb5a34d 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -81,6 +81,7 @@ RSpec.describe Gitlab::ImportExport::JSON::StreamingSerializer do
let(:group_options) do
{ include: [], only: [:name, :path, :description] }
end
+
let(:include) do
[{ group: group_options }]
end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 6d5604dc40f..f75494aa7c7 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -502,6 +502,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
let(:project_tree_restorer) do
described_class.new(user: user, shared: shared, project: project)
end
+
let(:restored_project_json) { project_tree_restorer.restore }
it 'does not read a symlink' do
@@ -919,6 +920,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
}
]
end
+
let(:tree_hash) { { 'project_members' => project_members } }
before do
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index 40c103eeda6..a2c5848f100 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -275,6 +275,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
File.join(shared.export_path, Gitlab::ImportExport.project_filename)
end
end
+
let(:shared) { project.import_export_shared }
let(:params) { {} }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 2d313b4dcad..a108bc94da5 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -32,6 +32,7 @@ Issue:
- discussion_locked
- health_status
- external_key
+- issue_type
Event:
- id
- target_type
@@ -41,6 +42,7 @@ Event:
- updated_at
- action
- author_id
+- fingerprint
WikiPage::Meta:
- id
- title
@@ -215,6 +217,7 @@ MergeRequestDiff:
- head_commit_sha
- start_commit_sha
- commits_count
+- files_count
MergeRequestDiffCommit:
- merge_request_diff_id
- relative_order
@@ -285,6 +288,7 @@ MergeRequest::Metrics:
- first_approved_at
- first_reassigned_at
- added_lines
+- target_project_id
- removed_lines
Ci::Pipeline:
- id
@@ -656,6 +660,7 @@ PrometheusMetric:
- group
- common
- identifier
+- dashboard_path
PrometheusAlert:
- threshold
- operator
@@ -735,6 +740,8 @@ Board:
- milestone_id
- weight
- name
+- hide_backlog_list
+- hide_closed_list
List:
- id
- board_id
@@ -765,6 +772,7 @@ DesignManagement::Design:
- id
- project_id
- filename
+- relative_position
DesignManagement::Action:
- id
- event
diff --git a/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb b/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb
index 9a55e21d031..6dc96217f09 100644
--- a/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb
+++ b/spec/lib/gitlab/incident_management/pager_duty/incident_issue_description_spec.rb
@@ -10,9 +10,11 @@ RSpec.describe Gitlab::IncidentManagement::PagerDuty::IncidentIssueDescription d
let(:assignees) do
[{ 'summary' => 'Laura Haley', 'url' => 'https://webdemo.pagerduty.com/users/P553OPV' }]
end
+
let(:impacted_services) do
[{ 'summary' => 'Production XDB Cluster', 'url' => 'https://webdemo.pagerduty.com/services/PN49J75' }]
end
+
let(:incident_payload) do
{
'url' => 'https://webdemo.pagerduty.com/incidents/PRORDTY',
diff --git a/spec/lib/gitlab/incoming_email_spec.rb b/spec/lib/gitlab/incoming_email_spec.rb
index 19d608cf48e..72d201eed77 100644
--- a/spec/lib/gitlab/incoming_email_spec.rb
+++ b/spec/lib/gitlab/incoming_email_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::IncomingEmail do
end
it 'returns true' do
- expect(described_class.enabled?).to be_truthy
+ expect(described_class.enabled?).to be(true)
end
end
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::IncomingEmail do
end
it "returns false" do
- expect(described_class.enabled?).to be_falsey
+ expect(described_class.enabled?).to be(false)
end
end
end
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::IncomingEmail do
end
it 'confirms that wildcard is supported' do
- expect(described_class.supports_wildcard?).to be_truthy
+ expect(described_class.supports_wildcard?).to be(true)
end
end
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::IncomingEmail do
end
it 'returns that wildcard is not supported' do
- expect(described_class.supports_wildcard?).to be_falsey
+ expect(described_class.supports_wildcard?).to be(false)
end
end
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::IncomingEmail do
end
it 'returns that wildcard is not supported' do
- expect(described_class.supports_wildcard?).to be_falsey
+ expect(described_class.supports_wildcard?).to be(false)
end
end
end
diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
index 5b0ad63ee72..09280402e2b 100644
--- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
+++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh
# Exercise counting of a bulk reply
[[:set, 'foo', 'bar' * 100]] | [:get, 'foo'] | 3 + 3 | 3 * 100
- # Nested array response: ['123456-89', ['foo', 'bar']]
- [[:xadd, 'mystream', '123456-89', 'foo', 'bar']] | [:xrange, 'mystream', '-', '+'] | 6 + 8 + 1 + 1 | 9 + 3 + 3
+ # Nested array response: [['foo', 0], ['bar', 1]]
+ [[:zadd, 'myset', 0, 'foo'], [:zadd, 'myset', 1, 'bar']] | [:zrange, 'myset', 0, -1, 'withscores'] | 6 + 5 + 1 + 2 + 10 | 3 + 1 + 3 + 1
end
with_them do
diff --git a/spec/lib/gitlab/issuable_sorter_spec.rb b/spec/lib/gitlab/issuable_sorter_spec.rb
index 60f62062f04..b8d0c7b0609 100644
--- a/spec/lib/gitlab/issuable_sorter_spec.rb
+++ b/spec/lib/gitlab/issuable_sorter_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Gitlab::IssuableSorter do
build_stubbed(:issue, iid: 1, project: project5),
build_stubbed(:issue, iid: 1, project: project6)]
end
+
let(:unsorted) do
[sorted[3], sorted[1], sorted[4], sorted[2],
sorted[6], sorted[5], sorted[0], sorted[7]]
diff --git a/spec/lib/gitlab/issuables_count_for_state_spec.rb b/spec/lib/gitlab/issuables_count_for_state_spec.rb
index 1c186a8e6ca..d96152e47ea 100644
--- a/spec/lib/gitlab/issuables_count_for_state_spec.rb
+++ b/spec/lib/gitlab/issuables_count_for_state_spec.rb
@@ -9,6 +9,21 @@ RSpec.describe Gitlab::IssuablesCountForState do
let(:counter) { described_class.new(finder) }
+ describe 'project given' do
+ let(:project) { build(:project) }
+ let(:counter) { described_class.new(finder, project) }
+
+ it 'provides the project' do
+ expect(counter.project).to eq(project)
+ end
+ end
+
+ describe '.declarative_policy_class' do
+ subject { described_class.declarative_policy_class }
+
+ it { is_expected.to eq('IssuablePolicy') }
+ end
+
describe '#for_state_or_opened' do
it 'returns the number of issuables for the given state' do
expect(counter.for_state_or_opened(:closed)).to eq(1)
diff --git a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
index 4adc4e4d22a..e57a8457e7c 100644
--- a/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issue_serializer_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe Gitlab::JiraImport::IssueSerializer do
let(:parent_field) do
{ 'key' => 'FOO-2', 'id' => '1050', 'fields' => { 'summary' => 'parent issue FOO' } }
end
+
let(:priority_field) { { 'name' => 'Medium' } }
let(:labels_field) { %w(bug dev backend frontend) }
diff --git a/spec/lib/gitlab/jira_import/metadata_collector_spec.rb b/spec/lib/gitlab/jira_import/metadata_collector_spec.rb
index 86863d67f25..51751c7b75f 100644
--- a/spec/lib/gitlab/jira_import/metadata_collector_spec.rb
+++ b/spec/lib/gitlab/jira_import/metadata_collector_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::JiraImport::MetadataCollector do
let(:parent_field) do
{ 'key' => 'FOO-2', 'id' => '1050', 'fields' => { 'summary' => 'parent issue FOO' } }
end
+
let(:issue_type_field) { { 'name' => 'Task' } }
let(:fix_versions_field) { [{ 'name' => '1.0' }, { 'name' => '1.1' }] }
let(:priority_field) { { 'name' => 'Medium' } }
@@ -30,6 +31,7 @@ RSpec.describe Gitlab::JiraImport::MetadataCollector do
'duedate' => duedate_field
}
end
+
let(:jira_issue) do
double(
id: '1234',
diff --git a/spec/lib/gitlab/job_waiter_spec.rb b/spec/lib/gitlab/job_waiter_spec.rb
index 4d7c838aa3b..7aa0a3485fb 100644
--- a/spec/lib/gitlab/job_waiter_spec.rb
+++ b/spec/lib/gitlab/job_waiter_spec.rb
@@ -60,16 +60,14 @@ RSpec.describe Gitlab::JobWaiter do
described_class.notify(waiter.key, 'a')
described_class.notify(waiter.key, 'b')
- result = nil
- expect { Timeout.timeout(1) { result = waiter.wait(2) } }.not_to raise_error
+ expect { Timeout.timeout(1) { waiter.wait(2) } }.not_to raise_error
end
it 'increments job_waiter_started_total and job_waiter_timeouts_total when it times out' do
expect(started_total).to receive(:increment).with(worker: 'Foo')
expect(timeouts_total).to receive(:increment).with(worker: 'Foo')
- result = nil
- expect { Timeout.timeout(2) { result = waiter.wait(1) } }.not_to raise_error
+ expect { Timeout.timeout(2) { waiter.wait(1) } }.not_to raise_error
end
end
end
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
index d7671dda323..0402296a3a8 100644
--- a/spec/lib/gitlab/json_spec.rb
+++ b/spec/lib/gitlab/json_spec.rb
@@ -407,4 +407,36 @@ RSpec.describe Gitlab::Json do
end
end
end
+
+ describe Gitlab::Json::LimitedEncoder do
+ subject { described_class.encode(obj, limit: 8.kilobytes) }
+
+ context 'when object size is acceptable' do
+ let(:obj) { { test: true } }
+
+ it 'returns json string' do
+ is_expected.to eq("{\"test\":true}")
+ end
+ end
+
+ context 'when object is too big' do
+ let(:obj) { [{ test: true }] * 1000 }
+
+ it 'raises LimitExceeded error' do
+ expect { subject }.to raise_error(
+ Gitlab::Json::LimitedEncoder::LimitExceeded
+ )
+ end
+ end
+
+ context 'when json_limited_encoder is disabled' do
+ let(:obj) { [{ test: true }] * 1000 }
+
+ it 'does not raise an error' do
+ stub_feature_flags(json_limited_encoder: false)
+
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
end
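
The LimitedEncoder examples only require that encoding succeeds for small payloads, raises LimitExceeded for oversized ones, and is bypassed when the json_limited_encoder flag is off. A naive sketch that satisfies those expectations (the real encoder presumably avoids building the full string before checking its size):

require 'json'

module LimitedEncoderSketch
  LimitExceeded = Class.new(StandardError)

  def self.encode(object, limit:, enabled: true)
    json = JSON.generate(object)
    raise LimitExceeded if enabled && json.bytesize > limit

    json
  end
end

LimitedEncoderSketch.encode({ test: true }, limit: 8 * 1024)
# => "{\"test\":true}"
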
diff --git a/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
new file mode 100644
index 00000000000..9600a70a95d
--- /dev/null
+++ b/spec/lib/gitlab/kubernetes/cilium_network_policy_spec.rb
@@ -0,0 +1,217 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Kubernetes::CiliumNetworkPolicy do
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ creation_timestamp: '2020-04-14T00:08:30Z',
+ endpoint_selector: endpoint_selector,
+ ingress: ingress,
+ egress: egress,
+ description: description
+ )
+ end
+
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ kind: partial_class_name,
+ apiVersion: "cilium.io/v2",
+ metadata: { name: name, namespace: namespace, resourceVersion: resource_version },
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil }
+ )
+ end
+
+ let(:name) { 'example-name' }
+ let(:namespace) { 'example-namespace' }
+ let(:endpoint_selector) { { matchLabels: { role: 'db' } } }
+ let(:description) { 'example-description' }
+ let(:partial_class_name) { described_class.name.split('::').last }
+ let(:resource_version) { 101 }
+ let(:ingress) do
+ [
+ {
+ fromEndpoints: [
+ { matchLabels: { project: 'myproject' } }
+ ]
+ }
+ ]
+ end
+
+ let(:egress) do
+ [
+ {
+ ports: [{ port: 5978 }]
+ }
+ ]
+ end
+
+ include_examples 'network policy common specs' do
+    let(:selector) { endpoint_selector }
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ selector: selector,
+ ingress: ingress,
+ labels: labels,
+ resource_version: resource_version
+ )
+ end
+
+ let(:spec) { { endpointSelector: selector, ingress: ingress, egress: nil } }
+ let(:metadata) { { name: name, namespace: namespace, resourceVersion: resource_version } }
+ end
+
+ describe '#generate' do
+ subject { policy.generate }
+
+ it { is_expected.to eq(resource) }
+ end
+
+ describe '.from_yaml' do
+ let(:manifest) do
+ <<~POLICY
+ apiVersion: cilium.io/v2
+ kind: CiliumNetworkPolicy
+ metadata:
+ name: example-name
+ namespace: example-namespace
+ resourceVersion: 101
+ spec:
+ endpointSelector:
+ matchLabels:
+ role: db
+ ingress:
+ - fromEndpoints:
+ - matchLabels:
+ project: myproject
+ POLICY
+ end
+
+ subject { Gitlab::Kubernetes::CiliumNetworkPolicy.from_yaml(manifest)&.generate }
+
+ it { is_expected.to eq(resource) }
+
+ context 'with nil manifest' do
+ let(:manifest) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with invalid manifest' do
+ let(:manifest) { "\tfoo: bar" }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with manifest without metadata' do
+ let(:manifest) do
+ <<~POLICY
+ apiVersion: cilium.io/v2
+ kind: CiliumNetworkPolicy
+ spec:
+ endpointSelector:
+ matchLabels:
+ role: db
+ ingress:
+ - fromEndpoints:
+ matchLabels:
+ project: myproject
+ POLICY
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with manifest without spec' do
+ let(:manifest) do
+ <<~POLICY
+ apiVersion: cilium.io/v2
+ kind: CiliumNetworkPolicy
+ metadata:
+ name: example-name
+ namespace: example-namespace
+ POLICY
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with disallowed class' do
+ let(:manifest) do
+ <<~POLICY
+ apiVersion: cilium.io/v2
+ kind: CiliumNetworkPolicy
+ metadata:
+ name: example-name
+ namespace: example-namespace
+ creationTimestamp: 2020-04-14T00:08:30Z
+ spec:
+ endpointSelector:
+ matchLabels:
+ role: db
+ ingress:
+ - fromEndpoints:
+ matchLabels:
+ project: myproject
+ POLICY
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '.from_resource' do
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ metadata: {
+ name: name, namespace: namespace, creationTimestamp: '2020-04-14T00:08:30Z',
+ labels: { app: 'foo' }, resourceVersion: resource_version
+ },
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil, description: nil }
+ )
+ end
+
+ let(:generated_resource) do
+ ::Kubeclient::Resource.new(
+ kind: partial_class_name,
+ apiVersion: "cilium.io/v2",
+ metadata: { name: name, namespace: namespace, resourceVersion: resource_version, labels: { app: 'foo' } },
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil }
+ )
+ end
+
+ subject { Gitlab::Kubernetes::CiliumNetworkPolicy.from_resource(resource)&.generate }
+
+ it { is_expected.to eq(generated_resource) }
+
+ context 'with nil resource' do
+ let(:resource) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with resource without metadata' do
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ spec: { endpointSelector: endpoint_selector, ingress: ingress, egress: nil, labels: nil, description: nil }
+ )
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'with resource without spec' do
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ metadata: { name: name, namespace: namespace, uid: '128cf288-7de4-11ea-aceb-42010a800089', resourceVersion: resource_version }
+ )
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+end
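
The .from_yaml examples reduce to defensive parsing: nil input, syntactically invalid YAML, manifests missing metadata or spec, and manifests requiring disallowed classes (such as the unquoted timestamp) must all yield nil. A sketch of that guard logic, leaving out the Kubeclient::Resource construction the class actually performs:

require 'yaml'

# Returns a plain attributes hash, or nil for anything the spec rejects.
def policy_attributes_from_yaml(manifest)
  return if manifest.nil?

  parsed = YAML.safe_load(manifest, permitted_classes: [], aliases: false)
  return unless parsed.is_a?(Hash)
  return unless parsed['metadata'] && parsed['spec']

  { name: parsed.dig('metadata', 'name'),
    namespace: parsed.dig('metadata', 'namespace'),
    resource_version: parsed.dig('metadata', 'resourceVersion'),
    endpoint_selector: parsed.dig('spec', 'endpointSelector'),
    ingress: parsed.dig('spec', 'ingress') }
rescue Psych::Exception
  nil
end
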
diff --git a/spec/lib/gitlab/kubernetes/helm/api_spec.rb b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
index dabbab27b13..bcc95bdbf2b 100644
--- a/spec/lib/gitlab/kubernetes/helm/api_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/api_spec.rb
@@ -17,8 +17,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::API do
name: application_name,
chart: 'chart-name',
rbac: rbac,
- files: files,
- local_tiller_enabled: true
+ files: files
)
end
@@ -143,7 +142,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::API do
end
context 'with a service account' do
- let(:command) { Gitlab::Kubernetes::Helm::InitCommand.new(name: application_name, files: files, rbac: rbac, local_tiller_enabled: true) }
+ let(:command) { Gitlab::Kubernetes::Helm::InitCommand.new(name: application_name, files: files, rbac: rbac) }
context 'rbac-enabled cluster' do
let(:rbac) { true }
diff --git a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
index 57fafaebf98..a7abd6ab1bf 100644
--- a/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/base_command_spec.rb
@@ -16,8 +16,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::BaseCommand do
super(
name: 'test-class-name',
rbac: rbac,
- files: { some: 'value' },
- local_tiller_enabled: false
+ files: { some: 'value' }
)
end
end
diff --git a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
index 7b182478cc3..ff2c2d76f22 100644
--- a/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/delete_command_spec.rb
@@ -3,12 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::Kubernetes::Helm::DeleteCommand do
- subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files, local_tiller_enabled: local_tiller_enabled) }
+ subject(:delete_command) { described_class.new(name: app_name, rbac: rbac, files: files) }
let(:app_name) { 'app-name' }
let(:rbac) { true }
let(:files) { {} }
- let(:local_tiller_enabled) { true }
it_behaves_like 'helm command generator' do
let(:commands) do
@@ -21,50 +20,6 @@ RSpec.describe Gitlab::Kubernetes::Helm::DeleteCommand do
end
end
- context 'tillerless feature disabled' do
- let(:local_tiller_enabled) { false }
-
- it_behaves_like 'helm command generator' do
- let(:commands) do
- <<~EOS
- helm init --upgrade
- for i in $(seq 1 30); do helm version && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)
- helm delete --purge app-name
- EOS
- end
- end
-
- context 'when there is a ca.pem file' do
- let(:files) { { 'ca.pem': 'some file content' } }
-
- let(:tls_flags) do
- <<~EOS.squish
- --tls
- --tls-ca-cert /data/helm/app-name/config/ca.pem
- --tls-cert /data/helm/app-name/config/cert.pem
- --tls-key /data/helm/app-name/config/key.pem
- EOS
- end
-
- it_behaves_like 'helm command generator' do
- let(:commands) do
- <<~EOS
- helm init --upgrade
- for i in $(seq 1 30); do helm version #{tls_flags} && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)
- #{helm_delete_command}
- EOS
- end
-
- let(:helm_delete_command) do
- <<~EOS.squish
- helm delete --purge app-name
- #{tls_flags}
- EOS
- end
- end
- end
- end
-
describe '#pod_name' do
subject { delete_command.pod_name }
diff --git a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
index c982a417682..d538ed12a07 100644
--- a/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/init_command_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Kubernetes::Helm::InitCommand do
- subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac, local_tiller_enabled: false) }
+ subject(:init_command) { described_class.new(name: application.name, files: files, rbac: rbac) }
let(:application) { create(:clusters_applications_helm) }
let(:rbac) { false }
diff --git a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
index aad350256ec..6ed7323c96f 100644
--- a/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/install_command_spec.rb
@@ -12,8 +12,7 @@ RSpec.describe Gitlab::Kubernetes::Helm::InstallCommand do
version: version,
repository: repository,
preinstall: preinstall,
- postinstall: postinstall,
- local_tiller_enabled: local_tiller_enabled
+ postinstall: postinstall
)
end
@@ -23,7 +22,6 @@ RSpec.describe Gitlab::Kubernetes::Helm::InstallCommand do
let(:version) { '1.2.3' }
let(:preinstall) { nil }
let(:postinstall) { nil }
- let(:local_tiller_enabled) { true }
it_behaves_like 'helm command generator' do
let(:commands) do
@@ -52,46 +50,6 @@ RSpec.describe Gitlab::Kubernetes::Helm::InstallCommand do
end
end
- context 'tillerless feature disabled' do
- let(:local_tiller_enabled) { false }
-
- let(:tls_flags) do
- <<~EOS.squish
- --tls
- --tls-ca-cert /data/helm/app-name/config/ca.pem
- --tls-cert /data/helm/app-name/config/cert.pem
- --tls-key /data/helm/app-name/config/key.pem
- EOS
- end
-
- it_behaves_like 'helm command generator' do
- let(:commands) do
- <<~EOS
- helm init --upgrade
- for i in $(seq 1 30); do helm version #{tls_flags} && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)
- helm repo add app-name https://repository.example.com
- helm repo update
- #{helm_install_comand}
- EOS
- end
-
- let(:helm_install_comand) do
- <<~EOS.squish
- helm upgrade app-name chart-name
- --install
- --atomic
- --cleanup-on-fail
- --reset-values
- #{tls_flags}
- --version 1.2.3
- --set rbac.create\\=false,rbac.enabled\\=false
- --namespace gitlab-managed-apps
- -f /data/helm/app-name/config/values.yaml
- EOS
- end
- end
- end
-
context 'when rbac is true' do
let(:rbac) { true }
diff --git a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
index ea2ade18e37..487a38f286d 100644
--- a/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/patch_command_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe Gitlab::Kubernetes::Helm::PatchCommand do
let(:repository) { 'https://repository.example.com' }
let(:rbac) { false }
let(:version) { '1.2.3' }
- let(:local_tiller_enabled) { true }
subject(:patch_command) do
described_class.new(
@@ -16,47 +15,10 @@ RSpec.describe Gitlab::Kubernetes::Helm::PatchCommand do
rbac: rbac,
files: files,
version: version,
- repository: repository,
- local_tiller_enabled: local_tiller_enabled
+ repository: repository
)
end
- context 'when local tiller feature is disabled' do
- let(:local_tiller_enabled) { false }
-
- let(:tls_flags) do
- <<~EOS.squish
- --tls
- --tls-ca-cert /data/helm/app-name/config/ca.pem
- --tls-cert /data/helm/app-name/config/cert.pem
- --tls-key /data/helm/app-name/config/key.pem
- EOS
- end
-
- it_behaves_like 'helm command generator' do
- let(:commands) do
- <<~EOS
- helm init --upgrade
- for i in $(seq 1 30); do helm version #{tls_flags} && s=0 && break || s=$?; sleep 1s; echo \"Retrying ($i)...\"; done; (exit $s)
- helm repo add app-name https://repository.example.com
- helm repo update
- #{helm_upgrade_comand}
- EOS
- end
-
- let(:helm_upgrade_comand) do
- <<~EOS.squish
- helm upgrade app-name chart-name
- --reuse-values
- #{tls_flags}
- --version 1.2.3
- --namespace gitlab-managed-apps
- -f /data/helm/app-name/config/values.yaml
- EOS
- end
- end
- end
-
it_behaves_like 'helm command generator' do
let(:commands) do
<<~EOS
diff --git a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
index 8d386d41ad5..5a3ba59b8c0 100644
--- a/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
+++ b/spec/lib/gitlab/kubernetes/helm/reset_command_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Kubernetes::Helm::ResetCommand do
- subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files, local_tiller_enabled: false) }
+ subject(:reset_command) { described_class.new(name: name, rbac: rbac, files: files) }
let(:rbac) { true }
let(:name) { 'helm' }
diff --git a/spec/lib/gitlab/kubernetes/kube_client_spec.rb b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
index a15be42f393..8211b096d3b 100644
--- a/spec/lib/gitlab/kubernetes/kube_client_spec.rb
+++ b/spec/lib/gitlab/kubernetes/kube_client_spec.rb
@@ -80,13 +80,13 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
context 'errored' do
using RSpec::Parameterized::TableSyntax
- where(:error, :error_status) do
- SocketError | :unreachable
- OpenSSL::X509::CertificateError | :authentication_failure
- StandardError | :unknown_failure
- Kubeclient::HttpError.new(408, "timed out", nil) | :unreachable
- Kubeclient::HttpError.new(408, "timeout", nil) | :unreachable
- Kubeclient::HttpError.new(408, "", nil) | :authentication_failure
+ where(:error, :connection_status, :error_status) do
+ SocketError | :unreachable | :connection_error
+ OpenSSL::X509::CertificateError | :authentication_failure | :authentication_error
+ StandardError | :unknown_failure | :unknown_error
+ Kubeclient::HttpError.new(408, "timed out", nil) | :unreachable | :http_error
+ Kubeclient::HttpError.new(408, "timeout", nil) | :unreachable | :http_error
+ Kubeclient::HttpError.new(408, "", nil) | :authentication_failure | :http_error
end
with_them do
@@ -97,7 +97,7 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
it 'returns error status' do
result = described_class.graceful_request(1) { client.foo }
- expect(result).to eq({ status: error_status })
+ expect(result).to eq({ status: connection_status, connection_error: error_status })
end
end
end
@@ -227,6 +227,20 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe '#cilium_networking_client' do
+ subject { client.cilium_networking_client }
+
+ it_behaves_like 'a Kubeclient'
+
+ it 'has the cilium API group endpoint' do
+ expect(subject.api_endpoint.to_s).to match(%r{\/apis\/cilium.io\Z})
+ end
+
+ it 'has the api_version' do
+ expect(subject.instance_variable_get(:@api_version)).to eq('v2')
+ end
+ end
+
describe '#metrics_client' do
subject { client.metrics_client }
@@ -380,6 +394,30 @@ RSpec.describe Gitlab::Kubernetes::KubeClient do
end
end
+ describe 'cilium API group' do
+ let(:cilium_networking_client) { client.cilium_networking_client }
+
+ [
+ :create_cilium_network_policy,
+ :get_cilium_network_policies,
+ :update_cilium_network_policy,
+ :delete_cilium_network_policy
+ ].each do |method|
+ describe "##{method}" do
+ include_examples 'redirection not allowed', method
+ include_examples 'dns rebinding not allowed', method
+
+ it 'delegates to the cilium client' do
+ expect(client).to delegate_method(method).to(:cilium_networking_client)
+ end
+
+ it 'responds to the method' do
+ expect(client).to respond_to method
+ end
+ end
+ end
+ end
+
describe 'non-entity methods' do
it 'does not proxy for non-entity methods' do
expect(client).not_to respond_to :proxy_url
diff --git a/spec/lib/gitlab/kubernetes/network_policy_spec.rb b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
index a8ca15f998b..5d1dd5dec59 100644
--- a/spec/lib/gitlab/kubernetes/network_policy_spec.rb
+++ b/spec/lib/gitlab/kubernetes/network_policy_spec.rb
@@ -8,13 +8,20 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
name: name,
namespace: namespace,
creation_timestamp: '2020-04-14T00:08:30Z',
- pod_selector: pod_selector,
+ selector: pod_selector,
policy_types: %w(Ingress Egress),
ingress: ingress,
egress: egress
)
end
+ let(:resource) do
+ ::Kubeclient::Resource.new(
+ metadata: { name: name, namespace: namespace },
+ spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
+ )
+ end
+
let(:name) { 'example-name' }
let(:namespace) { 'example-namespace' }
let(:pod_selector) { { matchLabels: { role: 'db' } } }
@@ -37,6 +44,28 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
]
end
+ include_examples 'network policy common specs' do
+ let(:selector) { pod_selector }
+ let(:policy) do
+ described_class.new(
+ name: name,
+ namespace: namespace,
+ selector: selector,
+ ingress: ingress,
+ labels: labels
+ )
+ end
+
+ let(:spec) { { podSelector: selector, policyTypes: ["Ingress"], ingress: ingress, egress: nil } }
+ let(:metadata) { { name: name, namespace: namespace } }
+ end
+
+ describe '#generate' do
+ subject { policy.generate }
+
+ it { is_expected.to eq(resource) }
+ end
+
describe '.from_yaml' do
let(:manifest) do
<<~POLICY
@@ -45,8 +74,6 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
metadata:
name: example-name
namespace: example-namespace
- labels:
- app: foo
spec:
podSelector:
matchLabels:
@@ -60,12 +87,6 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
project: myproject
POLICY
end
- let(:resource) do
- ::Kubeclient::Resource.new(
- metadata: { name: name, namespace: namespace, labels: { app: 'foo' } },
- spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
- )
- end
subject { Gitlab::Kubernetes::NetworkPolicy.from_yaml(manifest)&.generate }
@@ -156,6 +177,7 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
spec: { podSelector: pod_selector, policyTypes: %w(Ingress), ingress: ingress, egress: nil }
)
end
+
let(:generated_resource) do
::Kubeclient::Resource.new(
metadata: { name: name, namespace: namespace, labels: { app: 'foo' } },
@@ -193,202 +215,4 @@ RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
it { is_expected.to be_nil }
end
end
-
- describe '#generate' do
- let(:resource) do
- ::Kubeclient::Resource.new(
- metadata: { name: name, namespace: namespace },
- spec: { podSelector: pod_selector, policyTypes: %w(Ingress Egress), ingress: ingress, egress: egress }
- )
- end
-
- subject { policy.generate }
-
- it { is_expected.to eq(resource) }
- end
-
- describe '#as_json' do
- let(:json_policy) do
- {
- name: name,
- namespace: namespace,
- creation_timestamp: '2020-04-14T00:08:30Z',
- manifest: YAML.dump(
- {
- metadata: { name: name, namespace: namespace },
- spec: { podSelector: pod_selector, policyTypes: %w(Ingress Egress), ingress: ingress, egress: egress }
- }.deep_stringify_keys
- ),
- is_autodevops: false,
- is_enabled: true
- }
- end
-
- subject { policy.as_json }
-
- it { is_expected.to eq(json_policy) }
- end
-
- describe '#autodevops?' do
- subject { policy.autodevops? }
-
- let(:chart) { nil }
- let(:policy) do
- described_class.new(
- name: name,
- namespace: namespace,
- labels: { chart: chart },
- pod_selector: pod_selector,
- ingress: ingress
- )
- end
-
- it { is_expected.to be false }
-
- context 'with non-autodevops chart' do
- let(:chart) { 'foo' }
-
- it { is_expected.to be false }
- end
-
- context 'with autodevops chart' do
- let(:chart) { 'auto-deploy-app-0.6.0' }
-
- it { is_expected.to be true }
- end
- end
-
- describe '#enabled?' do
- subject { policy.enabled? }
-
- let(:pod_selector) { nil }
- let(:policy) do
- described_class.new(
- name: name,
- namespace: namespace,
- pod_selector: pod_selector,
- ingress: ingress
- )
- end
-
- it { is_expected.to be true }
-
- context 'with empty pod_selector' do
- let(:pod_selector) { {} }
-
- it { is_expected.to be true }
- end
-
- context 'with nil matchLabels in pod_selector' do
- let(:pod_selector) { { matchLabels: nil } }
-
- it { is_expected.to be true }
- end
-
- context 'with empty matchLabels in pod_selector' do
- let(:pod_selector) { { matchLabels: {} } }
-
- it { is_expected.to be true }
- end
-
- context 'with disabled_by label in matchLabels in pod_selector' do
- let(:pod_selector) do
- { matchLabels: { Gitlab::Kubernetes::NetworkPolicy::DISABLED_BY_LABEL => 'gitlab' } }
- end
-
- it { is_expected.to be false }
- end
- end
-
- describe '#enable' do
- subject { policy.enabled? }
-
- let(:pod_selector) { nil }
- let(:policy) do
- described_class.new(
- name: name,
- namespace: namespace,
- pod_selector: pod_selector,
- ingress: ingress
- )
- end
-
- before do
- policy.enable
- end
-
- it { is_expected.to be true }
-
- context 'with empty pod_selector' do
- let(:pod_selector) { {} }
-
- it { is_expected.to be true }
- end
-
- context 'with nil matchLabels in pod_selector' do
- let(:pod_selector) { { matchLabels: nil } }
-
- it { is_expected.to be true }
- end
-
- context 'with empty matchLabels in pod_selector' do
- let(:pod_selector) { { matchLabels: {} } }
-
- it { is_expected.to be true }
- end
-
- context 'with disabled_by label in matchLabels in pod_selector' do
- let(:pod_selector) do
- { matchLabels: { Gitlab::Kubernetes::NetworkPolicy::DISABLED_BY_LABEL => 'gitlab' } }
- end
-
- it { is_expected.to be true }
- end
- end
-
- describe '#disable' do
- subject { policy.enabled? }
-
- let(:pod_selector) { nil }
- let(:policy) do
- described_class.new(
- name: name,
- namespace: namespace,
- pod_selector: pod_selector,
- ingress: ingress
- )
- end
-
- before do
- policy.disable
- end
-
- it { is_expected.to be false }
-
- context 'with empty pod_selector' do
- let(:pod_selector) { {} }
-
- it { is_expected.to be false }
- end
-
- context 'with nil matchLabels in pod_selector' do
- let(:pod_selector) { { matchLabels: nil } }
-
- it { is_expected.to be false }
- end
-
- context 'with empty matchLabels in pod_selector' do
- let(:pod_selector) { { matchLabels: {} } }
-
- it { is_expected.to be false }
- end
-
- context 'with disabled_by label in matchLabels in pod_selector' do
- let(:pod_selector) do
- { matchLabels: { Gitlab::Kubernetes::NetworkPolicy::DISABLED_BY_LABEL => 'gitlab' } }
- end
-
- it { is_expected.to be false }
- end
- end
end
diff --git a/spec/lib/gitlab/kubernetes/node_spec.rb b/spec/lib/gitlab/kubernetes/node_spec.rb
index 732bf29bc44..fdc3433ff0f 100644
--- a/spec/lib/gitlab/kubernetes/node_spec.rb
+++ b/spec/lib/gitlab/kubernetes/node_spec.rb
@@ -7,45 +7,51 @@ RSpec.describe Gitlab::Kubernetes::Node do
describe '#all' do
let(:cluster) { create(:cluster, :provided_by_user, :group) }
- let(:expected_nodes) { [] }
+ let(:expected_nodes) { nil }
+ let(:nodes) { [kube_node.merge(kube_node_metrics)] }
+
+ subject { described_class.new(cluster).all }
before do
stub_kubeclient_nodes_and_nodes_metrics(cluster.platform.api_url)
end
- subject { described_class.new(cluster).all }
-
context 'when connection to the cluster is successful' do
- let(:expected_nodes) { [kube_node.merge(kube_node_metrics)] }
+ let(:expected_nodes) { { nodes: nodes } }
it { is_expected.to eq(expected_nodes) }
end
- context 'when cluster cannot be reached' do
- before do
- allow(cluster.kubeclient.core_client).to receive(:discover)
- .and_raise(SocketError)
+ context 'when there is a connection error' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:error, :error_status) do
+ SocketError | :kubernetes_connection_error
+ OpenSSL::X509::CertificateError | :kubernetes_authentication_error
+ StandardError | :unknown_error
+ Kubeclient::HttpError.new(408, "", nil) | :kubeclient_http_error
end
- it { is_expected.to eq(expected_nodes) }
- end
+ context 'when there is an error while querying nodes' do
+ with_them do
+ before do
+ allow(cluster.kubeclient).to receive(:get_nodes).and_raise(error)
+ end
- context 'when cluster cannot be authenticated to' do
- before do
- allow(cluster.kubeclient.core_client).to receive(:discover)
- .and_raise(OpenSSL::X509::CertificateError.new('Certificate error'))
+ it { is_expected.to eq({ node_connection_error: error_status }) }
+ end
end
- it { is_expected.to eq(expected_nodes) }
- end
+ context 'when there is an error while querying metrics' do
+ with_them do
+ before do
+ allow(cluster.kubeclient).to receive(:get_nodes).and_return({ response: nodes })
+ allow(cluster.kubeclient).to receive(:metrics_client).and_raise(error)
+ end
- context 'when Kubeclient::HttpError is raised' do
- before do
- allow(cluster.kubeclient.core_client).to receive(:discover)
- .and_raise(Kubeclient::HttpError.new(403, 'Forbidden', nil))
+ it { is_expected.to eq({ nodes: nodes, metrics_connection_error: error_status }) }
+ end
end
-
- it { is_expected.to eq(expected_nodes) }
end
context 'when an uncategorised error is raised' do
@@ -54,7 +60,7 @@ RSpec.describe Gitlab::Kubernetes::Node do
.and_raise(StandardError)
end
- it { is_expected.to eq(expected_nodes) }
+ it { is_expected.to eq({ node_connection_error: :unknown_error }) }
it 'notifies Sentry' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
diff --git a/spec/lib/gitlab/language_detection_spec.rb b/spec/lib/gitlab/language_detection_spec.rb
index 04ad19a04ec..14523be8ec6 100644
--- a/spec/lib/gitlab/language_detection_spec.rb
+++ b/spec/lib/gitlab/language_detection_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Gitlab::LanguageDetection do
{ value: 1.51, label: "Go", color: "#2a4776", highlight: "#244776" },
{ value: 1.1, label: "MepmepLang", color: "#2a4776", highlight: "#244776" }]
end
+
let(:repository_languages) do
[RepositoryLanguage.new(share: 10, programming_language: ruby)]
end
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index c443944678f..56d708a1e11 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -238,6 +238,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
labels: [double(name: 'Label #2')]
)
end
+
let(:closed_pull_request) do
double(
number: 1347,
diff --git a/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
index 6a51cb6f39d..a5d2e00890b 100644
--- a/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issuable_formatter_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssuableFormatter do
let(:raw_data) do
double(number: 42)
end
+
let(:project) { double(import_type: 'github') }
let(:issuable_formatter) { described_class.new(project, raw_data) }
diff --git a/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
index 2ac79c4f5b8..148b59dedab 100644
--- a/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::LegacyGithubImport::MilestoneFormatter do
closed_at: nil
}
end
+
let(:iid_attr) { :number }
subject(:formatter) { described_class.new(project, raw_data) }
diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb
index 218007c6e2a..9daedfc37e4 100644
--- a/spec/lib/gitlab/lograge/custom_options_spec.rb
+++ b/spec/lib/gitlab/lograge/custom_options_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe Gitlab::Lograge::CustomOptions do
metadata: { 'meta.user' => 'jane.doe' }
}
end
+
let(:event) { ActiveSupport::Notifications::Event.new('test', 1, 2, 'transaction_id', event_payload) }
subject { described_class.call(event) }
diff --git a/spec/lib/gitlab/manifest_import/project_creator_spec.rb b/spec/lib/gitlab/manifest_import/project_creator_spec.rb
index 354acf53b7a..0ab5b277552 100644
--- a/spec/lib/gitlab/manifest_import/project_creator_spec.rb
+++ b/spec/lib/gitlab/manifest_import/project_creator_spec.rb
@@ -23,13 +23,14 @@ RSpec.describe Gitlab::ManifestImport::ProjectCreator do
it { expect { subject.execute }.to change { Project.count }.by(1) }
it { expect { subject.execute }.to change { Group.count }.by(1) }
- it 'creates project with valid full path and import url' do
+ it 'creates project with valid full path, import url and import source' do
subject.execute
project = Project.last
expect(project.full_path).to eq(File.join(group.path, 'device/common'))
expect(project.import_url).to eq('https://android-review.googlesource.com/device/common')
+ expect(project.import_source).to eq('https://android-review.googlesource.com/device/common')
end
end
end
diff --git a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
index 40ff9a765a6..bf40af8e62e 100644
--- a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Store, :clean_gitlab_redis_cache do
end
end
end
+
let(:storable) { storable_class.new }
let(:cache_key) { "markdown_cache:#{storable.cache_key}" }
diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb
index 640bbebf0da..b2a53fe1626 100644
--- a/spec/lib/gitlab/metrics/background_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb
@@ -4,16 +4,30 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::BackgroundTransaction do
let(:test_worker_class) { double(:class, name: 'TestWorker') }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) }
+
+ before do
+ allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric)
+ end
subject { described_class.new(test_worker_class) }
+ RSpec.shared_examples 'metric with worker labels' do |metric_method|
+ it 'measures with correct labels and value' do
+ value = 1
+ expect(prometheus_metric).to receive(metric_method).with({ controller: 'TestWorker', action: 'perform', feature_category: '' }, value)
+
+ subject.send(metric_method, :bau, value)
+ end
+ end
+
describe '#label' do
it 'returns labels based on class name' do
expect(subject.labels).to eq(controller: 'TestWorker', action: 'perform', feature_category: '')
end
it 'contains only the labels defined for metrics' do
- expect(subject.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABELS.keys)
+ expect(subject.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABEL_KEYS)
end
it 'includes the feature category if there is one' do
@@ -21,4 +35,22 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do
expect(subject.labels).to include(feature_category: 'source_code_management')
end
end
+
+ describe '#increment' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) }
+
+ it_behaves_like 'metric with worker labels', :increment
+ end
+
+ describe '#set' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) }
+
+ it_behaves_like 'metric with worker labels', :set
+ end
+
+ describe '#observe' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, base_labels: {}) }
+
+ it_behaves_like 'metric with worker labels', :observe
+ end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/cache_spec.rb b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
new file mode 100644
index 00000000000..9467d441ae1
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/cache_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Cache, :use_clean_rails_memory_store_caching do
+ let_it_be(:project1) { build_stubbed(:project) }
+ let_it_be(:project2) { build_stubbed(:project) }
+
+ let(:project1_key1) { "#{project1.id}_key1" }
+ let(:project1_key2) { "#{project1.id}_key2" }
+ let(:project2_key1) { "#{project2.id}_key1" }
+
+ let(:cache1) { described_class.for(project1) }
+ let(:cache2) { described_class.for(project2) }
+
+ before do
+ cache1.fetch(project1_key1) { 'data1' }
+ cache1.fetch(project1_key2) { 'data2' }
+ cache2.fetch(project2_key1) { 'data3' }
+ end
+
+ describe '.fetch' do
+ it 'stores data correctly' do
+ described_class.fetch('key1') { 'data1' }
+ described_class.fetch('key2') { 'data2' }
+
+ expect(described_class.fetch('key1')).to eq('data1')
+ expect(described_class.fetch('key2')).to eq('data2')
+ end
+ end
+
+ describe '.for' do
+ it 'returns a new instance' do
+ expect(described_class.for(project1)).to be_instance_of(described_class)
+ end
+ end
+
+ describe '#fetch' do
+ it 'stores data correctly' do
+ expect(cache1.fetch(project1_key1)).to eq('data1')
+ expect(cache1.fetch(project1_key2)).to eq('data2')
+ expect(cache2.fetch(project2_key1)).to eq('data3')
+ end
+ end
+
+ describe '#delete_all!' do
+ it 'deletes keys of the given project', :aggregate_failures do
+ cache1.delete_all!
+
+ expect(Rails.cache.exist?(project1_key1)).to be(false)
+ expect(Rails.cache.exist?(project1_key2)).to be(false)
+ expect(cache2.fetch(project2_key1)).to eq('data3')
+
+ cache2.delete_all!
+
+ expect(Rails.cache.exist?(project2_key1)).to be(false)
+ end
+
+    it 'does not fail when there is nothing to delete' do
+ project3 = build_stubbed(:project)
+ cache3 = described_class.for(project3)
+
+ expect { cache3.delete_all! }.not_to raise_error
+ end
+ end
+
+ context 'multiple fetches and deletes' do
+ specify :aggregate_failures do
+ cache1.delete_all!
+
+ expect(Rails.cache.exist?(project1_key1)).to be(false)
+ expect(Rails.cache.exist?(project1_key2)).to be(false)
+
+ cache1.fetch("#{project1.id}_key3") { 'data1' }
+ cache1.fetch("#{project1.id}_key4") { 'data2' }
+
+ expect(cache1.fetch("#{project1.id}_key3")).to eq('data1')
+ expect(cache1.fetch("#{project1.id}_key4")).to eq('data2')
+
+ cache1.delete_all!
+
+ expect(Rails.cache.exist?("#{project1.id}_key3")).to be(false)
+ expect(Rails.cache.exist?("#{project1.id}_key4")).to be(false)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb b/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb
index dd61f8ebc4d..1f306753c39 100644
--- a/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/defaults_spec.rb
@@ -4,5 +4,4 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::Dashboard::Defaults do
it { is_expected.to be_const_defined(:DEFAULT_PANEL_TYPE) }
- it { is_expected.to be_const_defined(:DEFAULT_PANEL_WEIGHT) }
end
diff --git a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
index 60e1e29d4c5..730a31346d7 100644
--- a/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/finder_spec.rb
@@ -142,20 +142,42 @@ RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store
describe '.find_all_paths' do
let(:all_dashboard_paths) { described_class.find_all_paths(project) }
- let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Default dashboard', default: true, system_dashboard: true, out_of_the_box_dashboard: true } }
+ let(:system_dashboard) { { path: system_dashboard_path, display_name: 'Overview', default: true, system_dashboard: true, out_of_the_box_dashboard: true } }
+ let(:k8s_pod_health_dashboard) { { path: pod_dashboard_path, display_name: 'K8s pod health', default: false, system_dashboard: false, out_of_the_box_dashboard: true } }
- it 'includes only the system dashboard by default' do
- expect(all_dashboard_paths).to eq([system_dashboard])
+ it 'includes OOTB dashboards by default' do
+ expect(all_dashboard_paths).to eq([k8s_pod_health_dashboard, system_dashboard])
end
context 'when the project contains dashboards' do
- let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
- let(:project) { project_with_dashboard(dashboard_path) }
+ let(:dashboard_content) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
+ let(:project) { project_with_dashboards(dashboards) }
- it 'includes system and project dashboards' do
- project_dashboard = { path: dashboard_path, display_name: 'test.yml', default: false, system_dashboard: false, out_of_the_box_dashboard: false }
+ let(:dashboards) do
+ {
+ '.gitlab/dashboards/metrics.yml' => dashboard_content,
+ '.gitlab/dashboards/better_metrics.yml' => dashboard_content
+ }
+ end
- expect(all_dashboard_paths).to contain_exactly(system_dashboard, project_dashboard)
+ it 'includes OOTB and project dashboards' do
+ project_dashboard1 = {
+ path: '.gitlab/dashboards/metrics.yml',
+ display_name: 'metrics.yml',
+ default: false,
+ system_dashboard: false,
+ out_of_the_box_dashboard: false
+ }
+
+ project_dashboard2 = {
+ path: '.gitlab/dashboards/better_metrics.yml',
+ display_name: 'better_metrics.yml',
+ default: false,
+ system_dashboard: false,
+ out_of_the_box_dashboard: false
+ }
+
+ expect(all_dashboard_paths).to eq([project_dashboard2, k8s_pod_health_dashboard, project_dashboard1, system_dashboard])
end
end
@@ -163,12 +185,13 @@ RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store
let(:self_monitoring_dashboard) do
{
path: self_monitoring_dashboard_path,
- display_name: 'Default dashboard',
+ display_name: 'Overview',
default: true,
- system_dashboard: false,
+ system_dashboard: true,
out_of_the_box_dashboard: true
}
end
+
let(:dashboard_path) { '.gitlab/dashboards/test.yml' }
let(:project) { project_with_dashboard(dashboard_path) }
@@ -185,7 +208,7 @@ RSpec.describe Gitlab::Metrics::Dashboard::Finder, :use_clean_rails_memory_store
out_of_the_box_dashboard: false
}
- expect(all_dashboard_paths).to contain_exactly(self_monitoring_dashboard, project_dashboard)
+ expect(all_dashboard_paths).to eq([self_monitoring_dashboard, project_dashboard])
end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index 7f7070dfafb..14a4c01fce3 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
Gitlab::Metrics::Dashboard::Stages::CustomMetricsDetailsInserter,
Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::Sorter,
Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
Gitlab::Metrics::Dashboard::Stages::UrlValidator
@@ -26,12 +25,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
let(:process_params) { [project, dashboard_yml, sequence, { environment: environment }] }
let(:dashboard) { described_class.new(*process_params).process }
- it 'includes a path for the prometheus endpoint with each metric' do
- expect(all_metrics).to satisfy_all do |metric|
- metric[:prometheus_endpoint_path] == prometheus_path(metric[:query_range])
- end
- end
-
it 'includes an id for each dashboard panel' do
expect(all_panels).to satisfy_all do |panel|
panel[:id].present?
@@ -72,14 +65,14 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
expect(all_metrics).to include get_metric_details(project_business_metric)
end
- it 'orders groups by priority and panels by weight' do
+    it 'displays groups and panels in the order they are defined' do
expected_metrics_order = [
- 'metric_b', # group priority 10, panel weight 1
- 'metric_a2', # group priority 1, panel weight 2
- 'metric_a1', # group priority 1, panel weight 1
- project_business_metric.id, # group priority 0, panel weight nil (0)
- project_response_metric.id, # group priority -5, panel weight nil (0)
- project_system_metric.id # group priority -10, panel weight nil (0)
+ 'metric_b',
+ 'metric_a2',
+ 'metric_a1',
+ project_business_metric.id,
+ project_response_metric.id,
+ project_system_metric.id
]
actual_metrics_order = all_metrics.map { |m| m[:id] || m[:metric_id] }
@@ -100,10 +93,10 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
let(:sequence) do
[
Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::Sorter
+ Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter
]
end
+
let(:dashboard) { described_class.new(*process_params).process }
it 'includes only dashboard metrics' do
diff --git a/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb b/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb
new file mode 100644
index 00000000000..a2c9906c0e9
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/repo_dashboard_finder_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::RepoDashboardFinder do
+ include MetricsDashboardHelpers
+
+ let_it_be(:project) { create(:project) }
+
+ describe '.list_dashboards' do
+ it 'deletes dashboard cache entries' do
+ cache = instance_double(Gitlab::Metrics::Dashboard::Cache)
+ allow(Gitlab::Metrics::Dashboard::Cache).to receive(:for).and_return(cache)
+
+ expect(cache).to receive(:delete_all!)
+
+ described_class.list_dashboards(project)
+ end
+
+ it 'returns empty array when there are no dashboards' do
+ expect(described_class.list_dashboards(project)).to eq([])
+ end
+
+ context 'when there are project dashboards available' do
+ let_it_be(:dashboard_path) { '.gitlab/dashboards/test.yml' }
+ let_it_be(:project) { project_with_dashboard(dashboard_path) }
+
+ it 'returns the dashboard list' do
+ expect(described_class.list_dashboards(project)).to contain_exactly(dashboard_path)
+ end
+ end
+ end
+
+ describe '.read_dashboard' do
+ it 'raises error when dashboard does not exist' do
+ dashboard_path = '.gitlab/dashboards/test.yml'
+
+ expect { described_class.read_dashboard(project, dashboard_path) }.to raise_error(
+ Gitlab::Metrics::Dashboard::Errors::NOT_FOUND_ERROR
+ )
+ end
+
+ context 'when there are project dashboards available' do
+ let_it_be(:dashboard_path) { '.gitlab/dashboards/test.yml' }
+ let_it_be(:project) { project_with_dashboard(dashboard_path) }
+
+ it 'reads dashboard' do
+ expect(described_class.read_dashboard(project, dashboard_path)).to eq(
+ fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml')
+ )
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb
new file mode 100644
index 00000000000..bb3c8626d32
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/stages/metric_endpoint_inserter_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter do
+ include MetricsDashboardHelpers
+
+ let(:project) { build_stubbed(:project) }
+ let(:environment) { build_stubbed(:environment, project: project) }
+
+ describe '#transform!' do
+ subject(:transform!) { described_class.new(project, dashboard, environment: environment).transform! }
+
+ let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
+
+ it 'generates prometheus_endpoint_path without newlines' do
+ query = 'avg( sum( container_memory_usage_bytes{ container_name!="POD", '\
+ 'pod_name=~"^{{ci_environment_slug}}-(.*)", namespace="{{kube_namespace}}" } ) '\
+ 'by (job) ) without (job) /1024/1024/1024'
+
+ transform!
+
+ expect(all_metrics[2][:prometheus_endpoint_path]).to eq(prometheus_path(query))
+ end
+
+ it 'includes a path for the prometheus endpoint with each metric' do
+ transform!
+
+ expect(all_metrics).to satisfy_all do |metric|
+ metric[:prometheus_endpoint_path].present? && !metric[:prometheus_endpoint_path].include?("\n")
+ end
+ end
+
+ it 'works when query/query_range is a number' do
+ query = 2000
+
+ transform!
+
+ expect(all_metrics[1][:prometheus_endpoint_path]).to eq(prometheus_path(query))
+ end
+ end
+
+ private
+
+ def all_metrics
+ dashboard[:panel_groups].flat_map do |group|
+ group[:panels].flat_map { |panel| panel[:metrics] }
+ end
+ end
+
+ def prometheus_path(query)
+ Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
+ project,
+ environment,
+ proxy_path: :query_range,
+ query: query
+ )
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb b/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
new file mode 100644
index 00000000000..d9987b67127
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/stages/track_panel_type_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Stages::TrackPanelType do
+ include MetricsDashboardHelpers
+
+ let(:project) { build_stubbed(:project) }
+ let(:environment) { build_stubbed(:environment, project: project) }
+
+ describe '#transform!' do
+ subject { described_class.new(project, dashboard, environment: environment) }
+
+ let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }
+
+ it 'creates tracking event' do
+ stub_application_setting(snowplow_enabled: true, snowplow_collector_hostname: 'localhost')
+ allow(Gitlab::Tracking).to receive(:event).and_call_original
+
+ subject.transform!
+
+ expect(Gitlab::Tracking).to have_received(:event)
+ .with('MetricsDashboard::Chart', 'chart_rendered', { label: 'area-chart' })
+ .at_least(:once)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/url_spec.rb b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
index 56556423b05..205e1000376 100644
--- a/spec/lib/gitlab/metrics/dashboard/url_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/url_spec.rb
@@ -102,6 +102,34 @@ RSpec.describe Gitlab::Metrics::Dashboard::Url do
it_behaves_like 'regex which matches url when expected'
end
+ describe '#alert_regex' do
+ let(:url) do
+ Gitlab::Routing.url_helpers.metrics_dashboard_namespace_project_prometheus_alert_url(
+ 'foo',
+ 'bar',
+ '1',
+ start: '2020-02-10T12:59:49.938Z',
+ end: '2020-02-10T20:59:49.938Z',
+ anchor: "anchor"
+ )
+ end
+
+ let(:expected_params) do
+ {
+ 'url' => url,
+ 'namespace' => 'foo',
+ 'project' => 'bar',
+ 'alert' => '1',
+ 'query' => "?end=2020-02-10T20%3A59%3A49.938Z&start=2020-02-10T12%3A59%3A49.938Z",
+ 'anchor' => '#anchor'
+ }
+ end
+
+ subject { described_class.alert_regex }
+
+ it_behaves_like 'regex which matches url when expected'
+ end
+
describe '#build_dashboard_url' do
it 'builds the url for the dashboard endpoint' do
url = described_class.build_dashboard_url('foo', 'bar', 1)
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb
new file mode 100644
index 00000000000..4b07f9dbbab
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/validator/client_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Validator::Client do
+ include MetricsDashboardHelpers
+
+ let_it_be(:schema_path) { 'lib/gitlab/metrics/dashboard/validator/schemas/dashboard.json' }
+
+ subject { described_class.new(dashboard, schema_path) }
+
+ describe '#execute' do
+ context 'with no validation errors' do
+ let(:dashboard) { load_sample_dashboard }
+
+ it 'returns empty array' do
+ expect(subject.execute).to eq([])
+ end
+ end
+
+ context 'with validation errors' do
+ let(:dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
+
+ it 'returns array of error objects' do
+ expect(subject.execute).to include(Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb
new file mode 100644
index 00000000000..129fb631f3e
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/validator/custom_formats_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Validator::CustomFormats do
+ describe '#format_handlers' do
+ describe 'add_to_metric_id_cache' do
+ it 'adds data to metric id cache' do
+ subject.format_handlers['add_to_metric_id_cache'].call('metric_id', '_schema')
+
+ expect(subject.metric_ids_cache).to eq(["metric_id"])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
new file mode 100644
index 00000000000..f0db1bd0d33
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/validator/errors_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Validator::Errors do
+ describe Gitlab::Metrics::Dashboard::Validator::Errors::SchemaValidationError do
+ context 'empty error hash' do
+ let(:error_hash) { {} }
+
+ it 'uses default error message' do
+ expect(described_class.new(error_hash).message).to eq('Dashboard failed schema validation')
+ end
+ end
+
+ context 'formatted message' do
+ subject { described_class.new(error_hash).message }
+
+ let(:error_hash) do
+ {
+ 'data' => 'property_name',
+ 'data_pointer' => pointer,
+ 'type' => type,
+ 'schema' => 'schema',
+ 'details' => details
+ }
+ end
+
+ context 'for root object' do
+ let(:pointer) { '' }
+
+ context 'when required keys are missing' do
+ let(:type) { 'required' }
+ let(:details) { { 'missing_keys' => ['one'] } }
+
+ it { is_expected.to eq 'root is missing required keys: one' }
+ end
+ end
+
+ context 'for nested object' do
+ let(:pointer) { '/nested_objects/0' }
+
+ context 'when required keys are missing' do
+ let(:type) { 'required' }
+ let(:details) { { 'missing_keys' => ['two'] } }
+
+ it { is_expected.to eq '/nested_objects/0 is missing required keys: two' }
+ end
+
+ context 'when there is type mismatch' do
+ %w(null string boolean integer number array object).each do |expected_type|
+ context "on type: #{expected_type}" do
+ let(:type) { expected_type }
+ let(:details) { nil }
+
+ subject { described_class.new(error_hash).message }
+
+ it { is_expected.to eq "'property_name' at /nested_objects/0 is not of type: #{expected_type}" }
+ end
+ end
+ end
+
+ context 'when data does not match pattern' do
+ let(:type) { 'pattern' }
+ let(:error_hash) do
+ {
+ 'data' => 'property_name',
+ 'data_pointer' => pointer,
+ 'type' => type,
+ 'schema' => { 'pattern' => 'aa.*' }
+ }
+ end
+
+ it { is_expected.to eq "'property_name' at /nested_objects/0 does not match pattern: aa.*" }
+ end
+
+ context 'when data does not match format' do
+ let(:type) { 'format' }
+ let(:error_hash) do
+ {
+ 'data' => 'property_name',
+ 'data_pointer' => pointer,
+ 'type' => type,
+ 'schema' => { 'format' => 'date-time' }
+ }
+ end
+
+ it { is_expected.to eq "'property_name' at /nested_objects/0 does not match format: date-time" }
+ end
+
+ context 'when data is not const' do
+ let(:type) { 'const' }
+ let(:error_hash) do
+ {
+ 'data' => 'property_name',
+ 'data_pointer' => pointer,
+ 'type' => type,
+ 'schema' => { 'const' => 'one' }
+ }
+ end
+
+ it { is_expected.to eq "'property_name' at /nested_objects/0 is not: \"one\"" }
+ end
+
+ context 'when data is not included in enum' do
+ let(:type) { 'enum' }
+ let(:error_hash) do
+ {
+ 'data' => 'property_name',
+ 'data_pointer' => pointer,
+ 'type' => type,
+ 'schema' => { 'enum' => %w(one two) }
+ }
+ end
+
+ it { is_expected.to eq "'property_name' at /nested_objects/0 is not one of: [\"one\", \"two\"]" }
+ end
+
+        context 'when the error type is unknown' do
+ let(:type) { 'unknown' }
+ let(:error_hash) do
+ {
+ 'data' => 'property_name',
+ 'data_pointer' => pointer,
+ 'type' => type,
+ 'schema' => 'schema'
+ }
+ end
+
+ it { is_expected.to eq "'property_name' at /nested_objects/0 is invalid: error_type=unknown" }
+ end
+ end
+ end
+ end
+
+ describe Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds do
+ it 'has custom error message' do
+ expect(described_class.new.message).to eq('metric_id must be unique across a project')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb
new file mode 100644
index 00000000000..e7cb1429ca9
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/validator/post_schema_validator_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Validator::PostSchemaValidator do
+ describe '#validate' do
+ context 'with no project and dashboard_path provided' do
+ context 'unique local metric_ids' do
+ it 'returns empty array' do
+ expect(described_class.new(metric_ids: [1, 2, 3]).validate).to eq([])
+ end
+ end
+
+      context 'duplicate local metric_ids' do
+ it 'returns error' do
+ expect(described_class.new(metric_ids: [1, 1]).validate)
+ .to eq([Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds])
+ end
+ end
+ end
+
+ context 'with project and dashboard_path' do
+ let(:project) { create(:project) }
+
+ subject do
+ described_class.new(
+ project: project,
+ metric_ids: ['some_identifier'],
+ dashboard_path: 'test/path.yml'
+ ).validate
+ end
+
+ context 'with unique metric identifiers' do
+ before do
+ create(:prometheus_metric,
+ project: project,
+ identifier: 'some_other_identifier',
+ dashboard_path: 'test/path.yml'
+ )
+ end
+
+ it 'returns empty array' do
+ expect(subject).to eq([])
+ end
+ end
+
+ context 'duplicate metric identifiers in database' do
+ context 'with different dashboard_path' do
+ before do
+ create(:prometheus_metric,
+ project: project,
+ identifier: 'some_identifier',
+ dashboard_path: 'some/other/path.yml'
+ )
+ end
+
+ it 'returns error' do
+ expect(subject).to include(Gitlab::Metrics::Dashboard::Validator::Errors::DuplicateMetricIds)
+ end
+ end
+
+ context 'with same dashboard_path' do
+ before do
+ create(:prometheus_metric,
+ project: project,
+ identifier: 'some_identifier',
+ dashboard_path: 'test/path.yml'
+ )
+ end
+
+ it 'returns empty array' do
+ expect(subject).to eq([])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/dashboard/validator_spec.rb b/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
new file mode 100644
index 00000000000..c4cda271408
--- /dev/null
+++ b/spec/lib/gitlab/metrics/dashboard/validator_spec.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Dashboard::Validator do
+ include MetricsDashboardHelpers
+
+ let_it_be(:valid_dashboard) { load_sample_dashboard }
+ let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/invalid_dashboard.yml')) }
+ let_it_be(:duplicate_id_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/duplicate_id_dashboard.yml')) }
+
+ let_it_be(:project) { create(:project) }
+
+ describe '#validate' do
+ context 'valid dashboard schema' do
+ it 'returns true' do
+ expect(described_class.validate(valid_dashboard)).to be true
+ end
+
+ context 'with duplicate metric_ids' do
+ it 'returns false' do
+ expect(described_class.validate(duplicate_id_dashboard)).to be false
+ end
+ end
+
+ context 'with dashboard_path and project' do
+ subject { described_class.validate(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }
+
+ context 'with no conflicting metric identifiers in db' do
+ it { is_expected.to be true }
+ end
+
+ context 'with metric identifier present in current dashboard' do
+ before do
+ create(:prometheus_metric,
+ identifier: 'metric_a1',
+ dashboard_path: 'test/path.yml',
+ project: project
+ )
+ end
+
+ it { is_expected.to be true }
+ end
+
+ context 'with metric identifier present in another dashboard' do
+ before do
+ create(:prometheus_metric,
+ identifier: 'metric_a1',
+ dashboard_path: 'some/other/dashboard/path.yml',
+ project: project
+ )
+ end
+
+ it { is_expected.to be false }
+ end
+ end
+ end
+
+ context 'invalid dashboard schema' do
+ it 'returns false' do
+ expect(described_class.validate(invalid_dashboard)).to be false
+ end
+ end
+ end
+
+ describe '#validate!' do
+ shared_examples 'validation failed' do |errors_message|
+ it 'raises error with corresponding messages', :aggregate_failures do
+ expect { subject }.to raise_error do |error|
+ expect(error).to be_kind_of(Gitlab::Metrics::Dashboard::Validator::Errors::InvalidDashboardError)
+ expect(error.message).to eq(errors_message)
+ end
+ end
+ end
+
+ context 'valid dashboard schema' do
+ it 'returns true' do
+ expect(described_class.validate!(valid_dashboard)).to be true
+ end
+
+ context 'with duplicate metric_ids' do
+ subject { described_class.validate!(duplicate_id_dashboard) }
+
+ it_behaves_like 'validation failed', 'metric_id must be unique across a project'
+ end
+
+ context 'with dashboard_path and project' do
+ subject { described_class.validate!(valid_dashboard, dashboard_path: 'test/path.yml', project: project) }
+
+ context 'with no conflicting metric identifiers in db' do
+ it { is_expected.to be true }
+ end
+
+ context 'with metric identifier present in current dashboard' do
+ before do
+ create(:prometheus_metric,
+ identifier: 'metric_a1',
+ dashboard_path: 'test/path.yml',
+ project: project
+ )
+ end
+
+ it { is_expected.to be true }
+ end
+
+ context 'with metric identifier present in another dashboard' do
+ before do
+ create(:prometheus_metric,
+ identifier: 'metric_a1',
+ dashboard_path: 'some/other/dashboard/path.yml',
+ project: project
+ )
+ end
+
+ it_behaves_like 'validation failed', 'metric_id must be unique across a project'
+ end
+ end
+ end
+
+ context 'invalid dashboard schema' do
+ subject { described_class.validate!(invalid_dashboard) }
+
+ context 'wrong property type' do
+ it_behaves_like 'validation failed', "'this_should_be_a_int' at /panel_groups/0/panels/0/weight is not of type: number"
+ end
+
+ context 'panel groups missing' do
+ let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_missing_panel_groups.yml')) }
+
+ it_behaves_like 'validation failed', 'root is missing required keys: panel_groups'
+ end
+
+ context 'groups are missing panels and group keys' do
+ let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_groups_missing_panels_and_group.yml')) }
+
+ it_behaves_like 'validation failed', '/panel_groups/0 is missing required keys: group'
+ end
+
+ context 'panel is missing metrics key' do
+ let_it_be(:invalid_dashboard) { load_dashboard_yaml(fixture_file('lib/gitlab/metrics/dashboard/dashboard_panel_is_missing_metrics.yml')) }
+
+ it_behaves_like 'validation failed', '/panel_groups/0/panels/0 is missing required keys: metrics'
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb
index 1fbd41bcc88..78b73f148e4 100644
--- a/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/elasticsearch_rack_middleware_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe Gitlab::Metrics::ElasticsearchRackMiddleware do
let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
describe '#call' do
- let(:counter) { instance_double(Prometheus::Client::Counter, increment: nil) }
- let(:histogram) { instance_double(Prometheus::Client::Histogram, observe: nil) }
let(:elasticsearch_query_time) { 0.1 }
let(:elasticsearch_requests_count) { 2 }
@@ -18,19 +16,6 @@ RSpec.describe Gitlab::Metrics::ElasticsearchRackMiddleware do
allow(Gitlab::Instrumentation::ElasticsearchTransport).to receive(:query_time) { elasticsearch_query_time }
allow(Gitlab::Instrumentation::ElasticsearchTransport).to receive(:get_request_count) { elasticsearch_requests_count }
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:http_elasticsearch_requests_total,
- an_instance_of(String),
- Gitlab::Metrics::Transaction::BASE_LABELS)
- .and_return(counter)
-
- allow(Gitlab::Metrics).to receive(:histogram)
- .with(:http_elasticsearch_requests_duration_seconds,
- an_instance_of(String),
- Gitlab::Metrics::Transaction::BASE_LABELS,
- described_class::HISTOGRAM_BUCKETS)
- .and_return(histogram)
-
allow(Gitlab::Metrics).to receive(:current_transaction).and_return(transaction)
end
@@ -39,19 +24,30 @@ RSpec.describe Gitlab::Metrics::ElasticsearchRackMiddleware do
end
it 'records elasticsearch metrics' do
- expect(counter).to receive(:increment).with(transaction.labels, elasticsearch_requests_count)
- expect(histogram).to receive(:observe).with(transaction.labels, elasticsearch_query_time)
+ expect(transaction).to receive(:increment).with(:http_elasticsearch_requests_total, elasticsearch_requests_count)
+ expect(transaction).to receive(:observe).with(:http_elasticsearch_requests_duration_seconds, elasticsearch_query_time)
middleware.call(env)
end
it 'records elasticsearch metrics if an error is raised' do
- expect(counter).to receive(:increment).with(transaction.labels, elasticsearch_requests_count)
- expect(histogram).to receive(:observe).with(transaction.labels, elasticsearch_query_time)
+ expect(transaction).to receive(:increment).with(:http_elasticsearch_requests_total, elasticsearch_requests_count)
+ expect(transaction).to receive(:observe).with(:http_elasticsearch_requests_duration_seconds, elasticsearch_query_time)
allow(app).to receive(:call).with(env).and_raise(StandardError)
expect { middleware.call(env) }.to raise_error(StandardError)
end
+
+ context 'when there are no elasticsearch requests' do
+ let(:elasticsearch_requests_count) { 0 }
+
+ it 'does not record any metrics' do
+ expect(transaction).not_to receive(:observe).with(:http_elasticsearch_requests_duration_seconds)
+ expect(transaction).not_to receive(:increment).with(:http_elasticsearch_requests_total, 0)
+
+ middleware.call(env)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb
index 42361cbc36a..825c91b6cb4 100644
--- a/spec/lib/gitlab/metrics/method_call_spec.rb
+++ b/spec/lib/gitlab/metrics/method_call_spec.rb
@@ -3,12 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::Metrics::MethodCall do
- let(:transaction) { double(:transaction, labels: {}) }
+ let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) }
let(:method_call) { described_class.new('Foo#bar', :Foo, '#bar', transaction) }
describe '#measure' do
after do
- described_class.reload_metric!(:gitlab_method_call_duration_seconds)
+ ::Gitlab::Metrics::Transaction.reload_metric!(:gitlab_method_call_duration_seconds)
end
it 'measures the performance of the supplied block' do
@@ -36,13 +36,13 @@ RSpec.describe Gitlab::Metrics::MethodCall do
end
it 'metric is not a NullMetric' do
- expect(described_class).not_to be_instance_of(Gitlab::Metrics::NullMetric)
+ method_call.measure { 'foo' }
+ expect(::Gitlab::Metrics::Transaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).not_to be_instance_of(Gitlab::Metrics::NullMetric)
end
it 'observes the performance of the supplied block' do
- expect(described_class.gitlab_method_call_duration_seconds)
- .to receive(:observe)
- .with({ module: :Foo, method: '#bar' }, be_a_kind_of(Numeric))
+ expect(transaction)
+ .to receive(:observe).with(:gitlab_method_call_duration_seconds, be_a_kind_of(Numeric), { method: "#bar", module: :Foo })
method_call.measure { 'foo' }
end
@@ -53,11 +53,17 @@ RSpec.describe Gitlab::Metrics::MethodCall do
stub_feature_flags(prometheus_metrics_method_instrumentation: false)
end
- it 'observes using NullMetric' do
- expect(described_class.gitlab_method_call_duration_seconds).to be_instance_of(Gitlab::Metrics::NullMetric)
- expect(described_class.gitlab_method_call_duration_seconds).to receive(:observe)
+ it 'observes the performance of the supplied block' do
+ expect(transaction)
+ .to receive(:observe).with(:gitlab_method_call_duration_seconds, be_a_kind_of(Numeric), { method: "#bar", module: :Foo })
+
+ method_call.measure { 'foo' }
+ end
+ it 'observes using NullMetric' do
method_call.measure { 'foo' }
+
+ expect(::Gitlab::Metrics::Transaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).to be_instance_of(Gitlab::Metrics::NullMetric)
end
end
end
@@ -68,8 +74,9 @@ RSpec.describe Gitlab::Metrics::MethodCall do
end
it 'does not observe the performance' do
- expect(described_class.gitlab_method_call_duration_seconds)
+ expect(transaction)
.not_to receive(:observe)
+ .with(:gitlab_method_call_duration_seconds, be_a_kind_of(Numeric))
method_call.measure { 'foo' }
end
diff --git a/spec/lib/gitlab/metrics/methods_spec.rb b/spec/lib/gitlab/metrics/methods_spec.rb
index 3c171680272..71135a6e9c5 100644
--- a/spec/lib/gitlab/metrics/methods_spec.rb
+++ b/spec/lib/gitlab/metrics/methods_spec.rb
@@ -9,9 +9,9 @@ RSpec.describe Gitlab::Metrics::Methods do
let(:docstring) { 'description' }
let(:metric_name) { :sample_metric }
- describe "#define_#{metric_type}" do
+    describe "#define_metric" do
define_method(:call_define_metric_method) do |**args|
- subject.__send__("define_#{metric_type}", metric_name, **args)
+ subject.__send__(:define_metric, metric_type, metric_name, **args)
end
context 'metrics access method not defined' do
@@ -55,11 +55,11 @@ RSpec.describe Gitlab::Metrics::Methods do
end
end
- describe "#fetch_#{metric_type}" do
+ describe "#fetch_metric" do
let(:null_metric) { Gitlab::Metrics::NullMetric.instance }
define_method(:call_fetch_metric_method) do |**args|
- subject.__send__("fetch_#{metric_type}", metric_name, **args)
+ subject.__send__(:fetch_metric, metric_type, metric_name, **args)
end
context "when #{metric_type} is not cached" do
@@ -135,5 +135,5 @@ RSpec.describe Gitlab::Metrics::Methods do
include_examples 'metric', :counter, {}
include_examples 'metric', :gauge, {}, :all
- include_examples 'metric', :histogram, {}, [0.005, 0.01, 0.1, 1, 10]
+ include_examples 'metric', :histogram, {}, ::Prometheus::Client::Histogram::DEFAULT_BUCKETS
end
diff --git a/spec/lib/gitlab/metrics/rack_middleware_spec.rb b/spec/lib/gitlab/metrics/rack_middleware_spec.rb
index 335e5a490a6..ab56f38f0c1 100644
--- a/spec/lib/gitlab/metrics/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/rack_middleware_spec.rb
@@ -25,12 +25,4 @@ RSpec.describe Gitlab::Metrics::RackMiddleware do
expect { middleware.call(env) }.to raise_error(RuntimeError)
end
end
-
- describe '#transaction_from_env' do
- let(:transaction) { middleware.transaction_from_env(env) }
-
- it 'returns a Transaction' do
- expect(transaction).to be_an_instance_of(Gitlab::Metrics::WebTransaction)
- end
- end
end
diff --git a/spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb
deleted file mode 100644
index a85968dbd43..00000000000
--- a/spec/lib/gitlab/metrics/redis_rack_middleware_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Metrics::RedisRackMiddleware do
- let(:app) { double(:app) }
- let(:middleware) { described_class.new(app) }
- let(:env) { {} }
- let(:transaction) { Gitlab::Metrics::WebTransaction.new(env) }
-
- before do
- allow(app).to receive(:call).with(env).and_return('wub wub')
- end
-
- describe '#call' do
- let(:counter) { double(Prometheus::Client::Counter, increment: nil) }
- let(:histogram) { double(Prometheus::Client::Histogram, observe: nil) }
- let(:redis_query_time) { 0.1 }
- let(:redis_requests_count) { 2 }
-
- before do
- allow(Gitlab::Instrumentation::Redis).to receive(:query_time) { redis_query_time }
- allow(Gitlab::Instrumentation::Redis).to receive(:get_request_count) { redis_requests_count }
-
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:http_redis_requests_total,
- an_instance_of(String),
- Gitlab::Metrics::Transaction::BASE_LABELS)
- .and_return(counter)
-
- allow(Gitlab::Metrics).to receive(:histogram)
- .with(:http_redis_requests_duration_seconds,
- an_instance_of(String),
- Gitlab::Metrics::Transaction::BASE_LABELS,
- Gitlab::Instrumentation::Redis::QUERY_TIME_BUCKETS)
- .and_return(histogram)
-
- allow(Gitlab::Metrics).to receive(:current_transaction).and_return(transaction)
- end
-
- it 'calls the app' do
- expect(middleware.call(env)).to eq('wub wub')
- end
-
- it 'records redis metrics' do
- expect(counter).to receive(:increment).with(transaction.labels, redis_requests_count)
- expect(histogram).to receive(:observe).with(transaction.labels, redis_query_time)
-
- middleware.call(env)
- end
-
- it 'records redis metrics if an error is raised' do
- expect(counter).to receive(:increment).with(transaction.labels, redis_requests_count)
- expect(histogram).to receive(:observe).with(transaction.labels, redis_query_time)
-
- allow(app).to receive(:call).with(env).and_raise(StandardError)
-
- expect { middleware.call(env) }.to raise_error(StandardError)
- end
- end
-end
diff --git a/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb
new file mode 100644
index 00000000000..19477589289
--- /dev/null
+++ b/spec/lib/gitlab/metrics/samplers/threads_sampler_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Metrics::Samplers::ThreadsSampler do
+ subject { described_class.new }
+
+ describe '#interval' do
+ it 'samples every five seconds by default' do
+ expect(subject.interval).to eq(5)
+ end
+
+ it 'samples at other intervals if requested' do
+ expect(described_class.new(11).interval).to eq(11)
+ end
+ end
+
+ describe '#sample' do
+ before do
+ described_class::METRIC_DESCRIPTIONS.each_key do |metric|
+ allow(subject.metrics[metric]).to receive(:set)
+ end
+ end
+
+ it 'sets the gauge for the concurrency total' do
+ expect(Gitlab::Runtime).to receive(:max_threads).and_return(9000)
+ expect(subject.metrics[:max_expected_threads]).to receive(:set).with({}, 9000)
+
+ subject.sample
+ end
+
+ context 'thread counts' do
+ it 'reports if any of the threads per group uses the db' do
+ threads = [
+ fake_thread(described_class::SIDEKIQ_WORKER_THREAD_NAME, true), fake_thread(described_class::SIDEKIQ_WORKER_THREAD_NAME, false),
+ fake_thread(described_class::SIDEKIQ_WORKER_THREAD_NAME, nil)
+ ]
+ allow(Thread).to receive(:list).and_return(threads)
+
+ expect(subject.metrics[:running_threads]).to receive(:set)
+ .with({ uses_db_connection: 'yes', thread_name: described_class::SIDEKIQ_WORKER_THREAD_NAME }, 1)
+ expect(subject.metrics[:running_threads]).to receive(:set)
+ .with({ uses_db_connection: 'no', thread_name: described_class::SIDEKIQ_WORKER_THREAD_NAME }, 2)
+
+ subject.sample
+ end
+
+ context 'thread names', :aggregate_failures do
+ where(:thread_names, :expected_names) do
+ [
+ [[nil], %w(unnamed)],
+ [['puma threadpool 1', 'puma threadpool 001', 'puma threadpool 002'], ['puma threadpool']],
+ [%w(sidekiq_worker_thread), %w(sidekiq_worker_thread)],
+ [%w(some_sampler some_exporter), %w(some_sampler some_exporter)],
+ [%w(unknown thing), %w(unrecognized)]
+ ]
+ end
+
+ with_them do
+ it do
+ allow(Thread).to receive(:list).and_return(thread_names.map { |name| fake_thread(name) })
+
+ expected_names.each do |expected_name|
+ expect(subject.metrics[:running_threads]).to receive(:set)
+ .with({ uses_db_connection: 'yes', thread_name: expected_name }, instance_of(Integer))
+ expect(subject.metrics[:running_threads]).to receive(:set)
+ .with({ uses_db_connection: 'no', thread_name: expected_name }, instance_of(Integer))
+ end
+
+ subject.sample
+ end
+ end
+ end
+ end
+
+ def fake_thread(name = nil, db_connection = nil)
+ thready = { uses_db_connection: db_connection }
+ allow(thready).to receive(:name).and_return(name)
+
+ thready
+ end
+ end
+end
diff --git a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
index c66d8b1075c..047d1e5d205 100644
--- a/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/metrics/sidekiq_middleware_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe Gitlab::Metrics::SidekiqMiddleware do
worker = double(:worker, class: double(:class, name: 'TestWorker'))
expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |transaction|
- expect(transaction).to receive(:set).with(:sidekiq_queue_duration, instance_of(Float))
- expect(transaction).to receive(:increment).with(:db_count, 1)
+ expect(transaction).to receive(:set).with(:gitlab_transaction_sidekiq_queue_duration_total, instance_of(Float))
+ expect(transaction).to receive(:increment).with(:gitlab_transaction_db_count_total, 1)
end
middleware.call(worker, message, :test) do
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::Metrics::SidekiqMiddleware do
.and_call_original
expect_any_instance_of(Gitlab::Metrics::Transaction).to receive(:set)
- .with(:sidekiq_queue_duration, instance_of(Float))
+ .with(:gitlab_transaction_sidekiq_queue_duration_total, instance_of(Float))
middleware.call(worker, {}, :test) { nil }
end
diff --git a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
index 161527c01aa..adbc474343f 100644
--- a/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/action_view_spec.rb
@@ -22,15 +22,15 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActionView do
describe '#render_template' do
it 'tracks rendering of a template' do
expect(transaction).to receive(:increment)
- .with(:view_duration, 2.1)
+ .with(:gitlab_transaction_view_duration_total, 2.1)
subscriber.render_template(event)
end
it 'observes view rendering time' do
- expect(described_class.gitlab_view_rendering_duration_seconds)
+ expect(transaction)
.to receive(:observe)
- .with({ view: 'app/views/x.html.haml' }, 2.1)
+ .with(:gitlab_view_rendering_duration_seconds, 2.1, { view: "app/views/x.html.haml" })
subscriber.render_template(event)
end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index 2fd5dd1d83b..a31686b8061 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -37,10 +37,11 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
it 'increments only db count value' do
described_class::DB_COUNTERS.each do |counter|
+ prometheus_counter = "gitlab_transaction_#{counter}_total".to_sym
if expected_counters[counter] > 0
- expect(transaction).to receive(:increment).with(counter, 1)
+ expect(transaction).to receive(:increment).with(prometheus_counter, 1)
else
- expect(transaction).not_to receive(:increment).with(counter, 1)
+ expect(transaction).not_to receive(:increment).with(prometheus_counter, 1)
end
end
@@ -74,10 +75,18 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
expect(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
- expect(described_class.send(:gitlab_sql_duration_seconds)).to receive(:observe).with({}, 0.002)
+ expect(transaction).to receive(:observe).with(:gitlab_sql_duration_seconds, 0.002)
+
subscriber.sql(event)
end
+ it 'marks the current thread as using the database' do
+ # since it would already have been toggled by other specs
+ Thread.current[:uses_db_connection] = nil
+
+ expect { subscriber.sql(event) }.to change { Thread.current[:uses_db_connection] }.from(nil).to(true)
+ end
+
context 'with read query' do
let(:expected_counters) do
{
@@ -217,7 +226,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
end
it 'skips schema/begin/commit sql commands' do
- expect(subscriber).to receive(:current_transaction)
+ allow(subscriber).to receive(:current_transaction)
.at_least(:once)
.and_return(transaction)
diff --git a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
index f7ac719c16a..9aba6ac293c 100644
--- a/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/rails_cache_spec.rb
@@ -26,21 +26,12 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
context 'with hit event' do
let(:event) { double(:event, duration: 15.2, payload: { hit: true }) }
- it 'increments the cache_read_hit count' do
- expect(transaction).to receive(:increment)
- .with(:cache_read_hit_count, 1, false)
- expect(transaction).to receive(:increment)
- .with(any_args).at_least(1) # Other calls
-
- subscriber.cache_read(event)
- end
-
context 'when super operation is fetch' do
let(:event) { double(:event, duration: 15.2, payload: { hit: true, super_operation: :fetch }) }
- it 'does not increment cache read miss' do
+ it 'does not increment cache read miss total' do
expect(transaction).not_to receive(:increment)
- .with(:cache_read_hit_count, 1)
+ .with(:gitlab_cache_misses_total, 1)
subscriber.cache_read(event)
end
@@ -50,33 +41,21 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
context 'with miss event' do
let(:event) { double(:event, duration: 15.2, payload: { hit: false }) }
- it 'increments the cache_read_miss count' do
+ it 'increments the cache_read_miss total' do
expect(transaction).to receive(:increment)
- .with(:cache_read_miss_count, 1, false)
+ .with(:gitlab_cache_misses_total, 1)
expect(transaction).to receive(:increment)
.with(any_args).at_least(1) # Other calls
subscriber.cache_read(event)
end
- it 'increments the cache_read_miss total' do
- expect(subscriber.send(:metric_cache_misses_total)).to receive(:increment).with({})
-
- subscriber.cache_read(event)
- end
-
context 'when super operation is fetch' do
let(:event) { double(:event, duration: 15.2, payload: { hit: false, super_operation: :fetch }) }
- it 'does not increment cache read miss' do
+ it 'does not increment cache read miss total' do
expect(transaction).not_to receive(:increment)
- .with(:cache_read_miss_count, 1)
-
- subscriber.cache_read(event)
- end
-
- it 'does not increment cache_read_miss total' do
- expect(subscriber.send(:metric_cache_misses_total)).not_to receive(:increment).with({})
+ .with(:gitlab_cache_misses_total, 1)
subscriber.cache_read(event)
end
@@ -129,7 +108,7 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
it 'increments the cache_read_hit count' do
expect(transaction).to receive(:increment)
- .with(:cache_read_hit_count, 1)
+ .with(:gitlab_transaction_cache_read_hit_count_total, 1)
subscriber.cache_fetch_hit(event)
end
@@ -146,25 +125,17 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
end
context 'with a transaction' do
- let(:metric_cache_misses_total) { double('metric_cache_misses_total', increment: nil) }
-
before do
- allow(subscriber).to receive(:metric_cache_misses_total).and_return(metric_cache_misses_total)
allow(subscriber).to receive(:current_transaction)
.and_return(transaction)
end
- it 'increments the cache_fetch_miss count' do
+ it 'increments the cache_fetch_miss count and cache_read_miss total' do
+ expect(transaction).to receive(:increment).with(:gitlab_cache_misses_total, 1)
expect(transaction).to receive(:increment)
- .with(:cache_read_miss_count, 1)
-
- subscriber.cache_generate(event)
- end
+ .with(:gitlab_transaction_cache_read_miss_count_total, 1)
- it 'increments the cache_read_miss total' do
subscriber.cache_generate(event)
-
- expect(metric_cache_misses_total).to have_received(:increment).with({})
end
end
end
@@ -184,22 +155,6 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
.and_return(transaction)
end
- it 'increments the total and specific cache duration' do
- expect(transaction).to receive(:increment)
- .with(:cache_duration, event.duration, false)
-
- expect(transaction).to receive(:increment)
- .with(:cache_count, 1, false)
-
- expect(transaction).to receive(:increment)
- .with(:cache_delete_duration, event.duration, false)
-
- expect(transaction).to receive(:increment)
- .with(:cache_delete_count, 1, false)
-
- subscriber.observe(:delete, event.duration)
- end
-
it 'observes cache metric' do
expect(subscriber.send(:metric_cache_operation_duration_seconds))
.to receive(:observe)
@@ -209,9 +164,9 @@ RSpec.describe Gitlab::Metrics::Subscribers::RailsCache do
end
it 'increments the operations total' do
- expect(subscriber.send(:metric_cache_operations_total))
+ expect(transaction)
.to receive(:increment)
- .with(transaction.labels.merge(operation: :delete))
+ .with(:gitlab_cache_operations_total, 1, { operation: :delete })
subscriber.observe(:delete, event.duration)
end
diff --git a/spec/lib/gitlab/metrics/transaction_spec.rb b/spec/lib/gitlab/metrics/transaction_spec.rb
index e64179bd5c1..88293f11149 100644
--- a/spec/lib/gitlab/metrics/transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/transaction_spec.rb
@@ -28,14 +28,6 @@ RSpec.describe Gitlab::Metrics::Transaction do
end
end
- describe '#allocated_memory' do
- it 'returns the allocated memory in bytes' do
- transaction.run { 'a' * 32 }
-
- expect(transaction.allocated_memory).to be_a_kind_of(Numeric)
- end
- end
-
describe '#run' do
it 'yields the supplied block' do
expect { |b| transaction.run(&b) }.to yield_control
@@ -63,7 +55,7 @@ RSpec.describe Gitlab::Metrics::Transaction do
end
describe '#add_event' do
- let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil) }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil, base_labels: {}) }
it 'adds a metric' do
expect(prometheus_metric).to receive(:increment)
@@ -82,7 +74,7 @@ RSpec.describe Gitlab::Metrics::Transaction do
context 'with sensitive tags' do
before do
transaction.add_event(:baubau, **sensitive_tags.merge(sane: 'yes'))
- allow(described_class).to receive(:transaction_metric).and_return(prometheus_metric)
+ allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric)
end
it 'filters tags' do
@@ -94,24 +86,119 @@ RSpec.describe Gitlab::Metrics::Transaction do
end
describe '#increment' do
- let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil) }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil, base_labels: {}) }
it 'adds a metric' do
- expect(prometheus_metric).to receive(:increment).with(hash_including(:action, :controller), 1)
- expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_meow_total).and_return(prometheus_metric)
+ expect(prometheus_metric).to receive(:increment)
+ expect(::Gitlab::Metrics).to receive(:counter).with(:meow, 'Meow counter', hash_including(:controller, :action)).and_return(prometheus_metric)
transaction.increment(:meow, 1)
end
+
+ context 'with block' do
+ it 'overrides docstring' do
+ expect(::Gitlab::Metrics).to receive(:counter).with(:block_docstring, 'test', hash_including(:controller, :action)).and_return(prometheus_metric)
+
+ transaction.increment(:block_docstring, 1) do
+ docstring 'test'
+ end
+ end
+
+ it 'overrides labels' do
+ expect(::Gitlab::Metrics).to receive(:counter).with(:block_labels, 'Block labels counter', hash_including(:controller, :action, :sane)).and_return(prometheus_metric)
+
+ labels = { sane: 'yes' }
+ transaction.increment(:block_labels, 1, labels) do
+ label_keys %i(sane)
+ end
+ end
+
+ it 'filters sensitive tags' do
+ expect(::Gitlab::Metrics).to receive(:counter).with(:metric_with_sensitive_block, 'Metric with sensitive block counter', hash_excluding(sensitive_tags)).and_return(prometheus_metric)
+
+ labels_keys = sensitive_tags.keys
+ transaction.increment(:metric_with_sensitive_block, 1, sensitive_tags) do
+ label_keys labels_keys
+ end
+ end
+ end
end
describe '#set' do
- let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, set: nil) }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, set: nil, base_labels: {}) }
it 'adds a metric' do
- expect(prometheus_metric).to receive(:set).with(hash_including(:action, :controller), 1)
- expect(described_class).to receive(:fetch_metric).with(:gauge, :gitlab_transaction_meow_total).and_return(prometheus_metric)
+ expect(prometheus_metric).to receive(:set)
+ expect(::Gitlab::Metrics).to receive(:gauge).with(:meow_set, 'Meow set gauge', hash_including(:controller, :action), :all).and_return(prometheus_metric)
+
+ transaction.set(:meow_set, 1)
+ end
+
+ context 'with block' do
+ it 'overrides docstring' do
+ expect(::Gitlab::Metrics).to receive(:gauge).with(:block_docstring_set, 'test', hash_including(:controller, :action), :all).and_return(prometheus_metric)
+
+ transaction.set(:block_docstring_set, 1) do
+ docstring 'test'
+ end
+ end
+
+ it 'overrides labels' do
+ expect(::Gitlab::Metrics).to receive(:gauge).with(:block_labels_set, 'Block labels set gauge', hash_including(:controller, :action, :sane), :all).and_return(prometheus_metric)
- transaction.set(:meow, 1)
+ labels = { sane: 'yes' }
+ transaction.set(:block_labels_set, 1, labels) do
+ label_keys %i(sane)
+ end
+ end
+
+ it 'filters sensitive tags' do
+ expect(::Gitlab::Metrics).to receive(:gauge).with(:metric_set_with_sensitive_block, 'Metric set with sensitive block gauge', hash_excluding(sensitive_tags), :all).and_return(prometheus_metric)
+
+ label_keys = sensitive_tags.keys
+ transaction.set(:metric_set_with_sensitive_block, 1, sensitive_tags) do
+ label_keys label_keys
+ end
+ end
+ end
+ end
+
+ describe '#observe' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, observe: nil, base_labels: {}) }
+
+ it 'adds a metric' do
+ expect(prometheus_metric).to receive(:observe)
+ expect(::Gitlab::Metrics).to receive(:histogram).with(:meow_observe, 'Meow observe histogram', hash_including(:controller, :action), kind_of(Array)).and_return(prometheus_metric)
+
+ transaction.observe(:meow_observe, 1)
+ end
+
+ context 'with block' do
+ it 'overrides docstring' do
+ expect(::Gitlab::Metrics).to receive(:histogram).with(:block_docstring_observe, 'test', hash_including(:controller, :action), kind_of(Array)).and_return(prometheus_metric)
+
+ transaction.observe(:block_docstring_observe, 1) do
+ docstring 'test'
+ end
+ end
+
+ it 'overrides labels' do
+ expect(::Gitlab::Metrics).to receive(:histogram).with(:block_labels_observe, 'Block labels observe histogram', hash_including(:controller, :action, :sane), kind_of(Array)).and_return(prometheus_metric)
+
+ labels = { sane: 'yes' }
+ transaction.observe(:block_labels_observe, 1, labels) do
+ label_keys %i(sane)
+ end
+ end
+
+ it 'filters sensitive tags' do
+ expect(::Gitlab::Metrics).to receive(:histogram).with(:metric_observe_with_sensitive_block, 'Metric observe with sensitive block histogram', hash_excluding(sensitive_tags), kind_of(Array)).and_return(prometheus_metric)
+
+ label_keys = sensitive_tags.keys
+ transaction.observe(:metric_observe_with_sensitive_block, 1, sensitive_tags) do
+ label_keys label_keys
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index 12e98089066..6903ce53f65 100644
--- a/spec/lib/gitlab/metrics/web_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -5,29 +5,52 @@ require 'spec_helper'
RSpec.describe Gitlab::Metrics::WebTransaction do
let(:env) { {} }
let(:transaction) { described_class.new(env) }
- let(:prometheus_metric) { double("prometheus metric") }
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) }
before do
- allow(described_class).to receive(:transaction_metric).and_return(prometheus_metric)
+ allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric)
end
- describe '#duration' do
- it 'returns the duration of a transaction in seconds' do
- transaction.run { sleep(0.5) }
+ RSpec.shared_context 'ActionController request' do
+ let(:request) { double(:request, format: double(:format, ref: :html)) }
+ let(:controller_class) { double(:controller_class, name: 'TestController') }
- expect(transaction.duration).to be >= 0.5
+ before do
+ controller = double(:controller, class: controller_class, action_name: 'show', request: request)
+ env['action_controller.instance'] = controller
+ end
+ end
+
+ RSpec.shared_context 'transaction observe metrics' do
+ before do
+ allow(transaction).to receive(:observe)
+ end
+ end
+
+ RSpec.shared_examples 'metric with labels' do |metric_method|
+ include_context 'ActionController request'
+
+ it 'measures with correct labels and value' do
+ value = 1
+ expect(prometheus_metric).to receive(metric_method).with({ controller: 'TestController', action: 'show', feature_category: '' }, value)
+
+ transaction.send(metric_method, :bau, value)
end
end
- describe '#allocated_memory' do
- it 'returns the allocated memory in bytes' do
- transaction.run { 'a' * 32 }
+ describe '#duration' do
+ include_context 'transaction observe metrics'
+
+ it 'returns the duration of a transaction in seconds' do
+ transaction.run { sleep(0.5) }
- expect(transaction.allocated_memory).to be_a_kind_of(Numeric)
+ expect(transaction.duration).to be >= 0.5
end
end
describe '#run' do
+ include_context 'transaction observe metrics'
+
it 'yields the supplied block' do
expect { |b| transaction.run(&b) }.to yield_control
end
@@ -53,26 +76,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
end
- describe '#increment' do
- it 'increments a counter' do
- expect(prometheus_metric).to receive(:increment).with({}, 1)
-
- transaction.increment(:time, 1)
- end
- end
-
- describe '#set' do
- it 'sets a value' do
- expect(prometheus_metric).to receive(:set).with({}, 10)
-
- transaction.set(:number, 10)
- end
- end
-
describe '#labels' do
- let(:request) { double(:request, format: double(:format, ref: :html)) }
- let(:controller_class) { double(:controller_class, name: 'TestController') }
-
context 'when request goes to Grape endpoint' do
before do
route = double(:route, request_method: 'GET', path: '/:version/projects/:id/archive(.:format)')
@@ -86,7 +90,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
it 'contains only the labels defined for transactions' do
- expect(transaction.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABELS.keys)
+ expect(transaction.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABEL_KEYS)
end
it 'does not provide labels if route infos are missing' do
@@ -100,18 +104,14 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
context 'when request goes to ActionController' do
- before do
- controller = double(:controller, class: controller_class, action_name: 'show', request: request)
-
- env['action_controller.instance'] = controller
- end
+ include_context 'ActionController request'
it 'tags a transaction with the name and action of a controller' do
expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: '' })
end
it 'contains only the labels defined for transactions' do
- expect(transaction.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABELS.keys)
+ expect(transaction.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABEL_KEYS)
end
context 'when the request content type is not :html' do
@@ -144,6 +144,8 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
describe '#add_event' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) }
+
it 'adds a metric' do
expect(prometheus_metric).to receive(:increment)
@@ -156,4 +158,22 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
transaction.add_event(:bau, animal: 'dog')
end
end
+
+ describe '#increment' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :increment
+ end
+
+ describe '#set' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :set
+ end
+
+ describe '#observe' do
+ let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, base_labels: {}) }
+
+ it_behaves_like 'metric with labels', :observe
+ end
end
diff --git a/spec/lib/gitlab/metrics_spec.rb b/spec/lib/gitlab/metrics_spec.rb
index bdf72a3c288..db5a23e2328 100644
--- a/spec/lib/gitlab/metrics_spec.rb
+++ b/spec/lib/gitlab/metrics_spec.rb
@@ -71,14 +71,9 @@ RSpec.describe Gitlab::Metrics do
end
it 'adds a metric to the current transaction' do
- expect(transaction).to receive(:increment)
- .with('foo_real_time', a_kind_of(Numeric), false)
+ expect(transaction).to receive(:observe).with(:gitlab_foo_real_duration_seconds, a_kind_of(Numeric))
- expect(transaction).to receive(:increment)
- .with('foo_cpu_time', a_kind_of(Numeric), false)
-
- expect(transaction).to receive(:increment)
- .with('foo_call_count', 1, false)
+ expect(transaction).to receive(:observe).with(:gitlab_foo_cpu_duration_seconds, a_kind_of(Numeric))
described_class.measure(:foo) { 10 }
end
diff --git a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
index 8f9b0aec9eb..cdb48024531 100644
--- a/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
+++ b/spec/lib/gitlab/middleware/rails_queue_duration_spec.rb
@@ -29,26 +29,19 @@ RSpec.describe Gitlab::Middleware::RailsQueueDuration do
it 'sets proxy_flight_time and calls the app when the header is present' do
env['HTTP_GITLAB_WORKHORSE_PROXY_START'] = '123'
- expect(transaction).to receive(:set).with(:rails_queue_duration, an_instance_of(Float))
+ expect(transaction).to receive(:set).with(:gitlab_transaction_rails_queue_duration_total, an_instance_of(Float))
expect(middleware.call(env)).to eq('yay')
end
it 'observes rails queue duration metrics and calls the app when the header is present' do
env['HTTP_GITLAB_WORKHORSE_PROXY_START'] = '2000000000'
- expect(middleware.send(:metric_rails_queue_duration_seconds)).to receive(:observe).with(transaction.labels, 1)
+ expect(transaction).to receive(:observe).with(:gitlab_rails_queue_duration_seconds, 1)
Timecop.freeze(Time.at(3)) do
expect(middleware.call(env)).to eq('yay')
end
end
-
- it 'creates a metric with a docstring' do
- metric = middleware.send(:metric_rails_queue_duration_seconds)
-
- expect(metric).to be_instance_of(Prometheus::Client::Histogram)
- expect(metric.docstring).to eq('Measures latency between GitLab Workhorse forwarding a request to Rails')
- end
end
end
end
diff --git a/spec/lib/gitlab/middleware/read_only_spec.rb b/spec/lib/gitlab/middleware/read_only_spec.rb
index 3bdf2a5077f..50dd38278b9 100644
--- a/spec/lib/gitlab/middleware/read_only_spec.rb
+++ b/spec/lib/gitlab/middleware/read_only_spec.rb
@@ -110,6 +110,19 @@ RSpec.describe Gitlab::Middleware::ReadOnly do
expect(subject).not_to disallow_request
end
+ context 'relative URL is configured' do
+ before do
+ stub_config_setting(relative_url_root: '/gitlab')
+ end
+
+ it 'expects a graphql request to be allowed' do
+ response = request.post("/gitlab/api/graphql")
+
+ expect(response).not_to be_redirect
+ expect(subject).not_to disallow_request
+ end
+ end
+
context 'sidekiq admin requests' do
where(:mounted_at) do
[
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
new file mode 100644
index 00000000000..7d4db073d73
--- /dev/null
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pages::Settings do
+ describe '#path' do
+ subject { described_class.new(settings).path }
+
+ let(:settings) { double(path: 'the path') }
+
+ it { is_expected.to eq('the path') }
+
+ it 'does not track calls' do
+ expect(::Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ subject
+ end
+
+ context 'when running under a web server' do
+ before do
+ allow(::Gitlab::Runtime).to receive(:web_server?).and_return(true)
+ end
+
+ it { is_expected.to eq('the path') }
+
+ it 'does not track calls' do
+ expect(::Gitlab::ErrorTracking).not_to receive(:track_exception)
+
+ subject
+ end
+
+ context 'with the env var' do
+ before do
+ stub_env('GITLAB_PAGES_DENY_DISK_ACCESS', '1')
+ end
+
+ it { is_expected.to eq('the path') }
+
+ it 'tracks a DiskAccessDenied exception' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(instance_of(described_class::DiskAccessDenied)).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
new file mode 100644
index 00000000000..156a440833c
--- /dev/null
+++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
+ let(:pager) { described_class.new(request_context, project) }
+
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:request_context) { double("request context") }
+ let(:finder) { double("branch finder") }
+ let(:custom_port) { 8080 }
+ let(:incoming_api_projects_url) { "#{Gitlab.config.gitlab.url}:#{custom_port}/api/v4/projects" }
+
+ before do
+ stub_config_setting(port: custom_port)
+ end
+
+ describe '.paginate' do
+ let(:base_query) { { per_page: 2 } }
+ let(:query) { base_query }
+
+ before do
+ allow(request_context).to receive(:params).and_return(query)
+ allow(request_context).to receive(:header)
+ end
+
+ shared_examples_for 'offset pagination' do
+ let(:paginated_array) { double 'paginated array' }
+ let(:branches) { [] }
+
+ it 'uses offset pagination' do
+ expect(finder).to receive(:execute).and_return(branches)
+ expect(Kaminari).to receive(:paginate_array).with(branches).and_return(paginated_array)
+ expect_next_instance_of(Gitlab::Pagination::OffsetPagination) do |offset_pagination|
+ expect(offset_pagination).to receive(:paginate).with(paginated_array)
+ end
+
+ pager.paginate(finder)
+ end
+ end
+
+ context 'with branch_list_keyset_pagination feature off' do
+ before do
+ stub_feature_flags(branch_list_keyset_pagination: false)
+ end
+
+ context 'without keyset pagination option' do
+ it_behaves_like 'offset pagination'
+ end
+
+ context 'with keyset pagination option' do
+ let(:query) { base_query.merge(pagination: 'keyset') }
+
+ it_behaves_like 'offset pagination'
+ end
+ end
+
+ context 'with branch_list_keyset_pagination feature on' do
+ before do
+ stub_feature_flags(branch_list_keyset_pagination: project)
+ end
+
+ context 'without keyset pagination option' do
+ it_behaves_like 'offset pagination'
+ end
+
+ context 'with keyset pagination option' do
+ let(:query) { base_query.merge(pagination: 'keyset') }
+ let(:fake_request) { double(url: "#{incoming_api_projects_url}?#{query.to_query}") }
+
+ before do
+ allow(request_context).to receive(:request).and_return(fake_request)
+ expect(finder).to receive(:execute).with(gitaly_pagination: true).and_return(branches)
+ end
+
+ context 'when next page could be available' do
+ let(:branch1) { double 'branch', name: 'branch1' }
+ let(:branch2) { double 'branch', name: 'branch2' }
+ let(:branches) { [branch1, branch2] }
+
+ let(:expected_next_page_link) { %Q(<#{incoming_api_projects_url}?#{query.merge(page_token: branch2.name).to_query}>; rel="next") }
+
+ it 'uses keyset pagination and adds link headers' do
+ expect(request_context).to receive(:header).with('Links', expected_next_page_link)
+ expect(request_context).to receive(:header).with('Link', expected_next_page_link)
+
+ pager.paginate(finder)
+ end
+ end
+
+ context 'when the current page is the last page' do
+ let(:branch1) { double 'branch', name: 'branch1' }
+ let(:branches) { [branch1] }
+
+ it 'uses keyset pagination without link headers' do
+ expect(request_context).not_to receive(:header).with('Links', anything)
+ expect(request_context).not_to receive(:header).with('Link', anything)
+
+ pager.paginate(finder)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
index e63ad6e6626..2ec2571b7fe 100644
--- a/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/user_finder_spec.rb
@@ -58,6 +58,7 @@ RSpec.describe Gitlab::PhabricatorImport::UserFinder, :clean_gitlab_redis_cache
)
]
end
+
let(:client) do
client = instance_double(Gitlab::PhabricatorImport::Conduit::User)
allow(client).to receive(:users).and_return(response)
diff --git a/spec/lib/gitlab/popen/runner_spec.rb b/spec/lib/gitlab/popen/runner_spec.rb
index 5f72852c63e..c7b64e8108b 100644
--- a/spec/lib/gitlab/popen/runner_spec.rb
+++ b/spec/lib/gitlab/popen/runner_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::Popen::Runner do
stdout: 'stdout',
stderr: '',
exitstatus: 0,
- status: double(exitstatus: exitstatus, success?: exitstatus.zero?),
+ status: double(exitstatus: exitstatus, success?: exitstatus == 0),
duration: 0.1)
result =
diff --git a/spec/lib/gitlab/project_search_results_spec.rb b/spec/lib/gitlab/project_search_results_spec.rb
index 75a3fe06632..6e3c60b58dc 100644
--- a/spec/lib/gitlab/project_search_results_spec.rb
+++ b/spec/lib/gitlab/project_search_results_spec.rb
@@ -430,6 +430,7 @@ RSpec.describe Gitlab::ProjectSearchResults do
private_project.add_maintainer(user)
user
end
+
let(:team_reporter) do
user = create(:user, username: 'private-project-reporter')
private_project.add_reporter(user)
diff --git a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
index 117ca798022..60449aeef7d 100644
--- a/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
+++ b/spec/lib/gitlab/prometheus/queries/matched_metric_query_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Gitlab::Prometheus::Queries::MatchedMetricQuery do
[{ '__name__' => 'metric_a' },
{ '__name__' => 'metric_b' }]
end
+
let(:partially_empty_series_info) { [{ '__name__' => 'metric_a', 'environment' => '' }] }
let(:empty_series_info) { [] }
diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index 242a0ced031..0774c2f3144 100644
--- a/spec/lib/gitlab/prometheus_client_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -191,6 +191,7 @@ RSpec.describe Gitlab::PrometheusClient do
}
}
end
+
let(:query_url) { prometheus_query_with_time_url(query, Time.now.utc) }
around do |example|
diff --git a/spec/lib/gitlab/redis/hll_spec.rb b/spec/lib/gitlab/redis/hll_spec.rb
new file mode 100644
index 00000000000..cbf78f23036
--- /dev/null
+++ b/spec/lib/gitlab/redis/hll_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::HLL, :clean_gitlab_redis_shared_state do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:expiry) { 1.day }
+
+ describe '.add' do
+ context 'when checking key format' do
+ context 'for invalid keys' do
+ where(:metric_key, :value) do
+ 'test' | 1
+ 'test-{metric' | 1
+ 'test-{metric}}' | 1
+ end
+
+ with_them do
+ it 'raises an error when using an invalid key format' do
+ expect { described_class.add(key: metric_key, value: value, expiry: expiry) }.to raise_error(Gitlab::Redis::HLL::KeyFormatError)
+ end
+ end
+ end
+
+ context 'for valid keys' do
+ where(:metric_key, :value) do
+ 'test-{metric}' | 1
+ 'test-{metric}-1' | 1
+ 'test:{metric}-1' | 1
+ '2020-216-{project_action}' | 1
+ 'i_{analytics}_dev_ops_score-2020-32' | 1
+ end
+
+ with_them do
+ it "doesn't raise error when having correct format" do
+ expect { described_class.add(key: metric_key, value: value, expiry: expiry) }.not_to raise_error
+ end
+ end
+ end
+ end
+ end
+
+ describe '.count' do
+ let(:event_2020_32) { '2020-32-{expand_vulnerabilities}' }
+ let(:event_2020_33) { '2020-33-{expand_vulnerabilities}' }
+ let(:event_2020_34) { '2020-34-{expand_vulnerabilities}' }
+
+ let(:entity1) { 'user_id_1'}
+ let(:entity2) { 'user_id_2'}
+ let(:entity3) { 'user_id_3'}
+ let(:entity4) { 'user_id_4'}
+
+ before do
+ track_event(event_2020_32, entity1)
+ track_event(event_2020_32, entity1)
+ track_event(event_2020_32, entity2)
+ track_event(event_2020_32, entity3)
+
+ track_event(event_2020_33, entity3)
+ track_event(event_2020_33, entity3)
+
+ track_event(event_2020_34, entity3)
+ track_event(event_2020_34, entity2)
+ end
+
+ it 'has 3 distinct users for weeks 32, 33, 34' do
+ unique_counts = count_unique_events([event_2020_32, event_2020_33, event_2020_34])
+
+ expect(unique_counts).to eq(3)
+ end
+
+ it 'has 3 distinct users for weeks 32, 33' do
+ unique_counts = count_unique_events([event_2020_32, event_2020_33])
+
+ expect(unique_counts).to eq(3)
+ end
+
+ it 'has 2 distinct users for weeks 33, 34' do
+ unique_counts = count_unique_events([event_2020_33, event_2020_34])
+
+ expect(unique_counts).to eq(2)
+ end
+
+ it 'has one distinct user for week 33' do
+ unique_counts = count_unique_events([event_2020_33])
+
+ expect(unique_counts).to eq(1)
+ end
+
+ it 'has 4 distinct users when one different user has an action on week 34' do
+ track_event(event_2020_34, entity4, 29.days)
+ unique_counts = count_unique_events([event_2020_32, event_2020_33, event_2020_34])
+
+ expect(unique_counts).to eq(4)
+ end
+
+ def track_event(key, value, expiry = 1.day)
+ described_class.add(key: key, value: value, expiry: expiry)
+ end
+
+ def count_unique_events(keys)
+ described_class.count(keys: keys)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 7aece6fe697..1a6858858a7 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -131,6 +131,22 @@ RSpec.describe Gitlab::Regex do
it { is_expected.not_to match('9/9/2018') }
end
+ describe '.cluster_agent_name_regex' do
+ subject { described_class.cluster_agent_name_regex }
+
+ it { is_expected.to match('foo') }
+ it { is_expected.to match('foo-bar') }
+ it { is_expected.to match('1foo-bar') }
+ it { is_expected.to match('foo-bar2') }
+ it { is_expected.to match('foo-1bar') }
+ it { is_expected.not_to match('foo.bar') }
+ it { is_expected.not_to match('Foo') }
+ it { is_expected.not_to match('FoO') }
+ it { is_expected.not_to match('FoO-') }
+ it { is_expected.not_to match('-foo-') }
+ it { is_expected.not_to match('foo/bar') }
+ end
+
describe '.kubernetes_namespace_regex' do
subject { described_class.kubernetes_namespace_regex }
diff --git a/spec/lib/gitlab/repository_cache_adapter_spec.rb b/spec/lib/gitlab/repository_cache_adapter_spec.rb
index 3727217203e..c9ad79234d3 100644
--- a/spec/lib/gitlab/repository_cache_adapter_spec.rb
+++ b/spec/lib/gitlab/repository_cache_adapter_spec.rb
@@ -9,6 +9,89 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
let(:redis_set_cache) { repository.send(:redis_set_cache) }
let(:redis_hash_cache) { repository.send(:redis_hash_cache) }
+ describe '.cache_method_output_as_redis_set', :clean_gitlab_redis_cache, :aggregate_failures do
+ let(:klass) do
+ Class.new do
+ include Gitlab::RepositoryCacheAdapter # can't use described_class here
+
+ def letters
+ %w(b a c)
+ end
+ cache_method_as_redis_set(:letters)
+
+ def redis_set_cache
+ @redis_set_cache ||= Gitlab::RepositorySetCache.new(self)
+ end
+
+ def full_path
+ 'foo/bar'
+ end
+
+ def project
+ end
+ end
+ end
+
+ let(:fake_repository) { klass.new }
+
+ context 'with an existing repository' do
+ it 'caches the output, sorting the results' do
+ expect(fake_repository).to receive(:_uncached_letters).once.and_call_original
+
+ 2.times do
+ expect(fake_repository.letters).to eq(%w(a b c))
+ end
+
+ expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(true)
+ expect(fake_repository.instance_variable_get(:@letters)).to eq(%w(a b c))
+ end
+
+ context 'membership checks' do
+ context 'when the cache key does not exist' do
+ it 'calls the original method and populates the cache' do
+ expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(false)
+ expect(fake_repository).to receive(:_uncached_letters).once.and_call_original
+
+ # This populates the cache and memoizes the full result
+ expect(fake_repository.letters_include?('a')).to eq(true)
+ expect(fake_repository.letters_include?('d')).to eq(false)
+ expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(true)
+ end
+ end
+
+ context 'when the cache key exists' do
+ before do
+ fake_repository.redis_set_cache.write(:letters, %w(b a c))
+ end
+
+ it 'calls #include? on the set cache' do
+ expect(fake_repository.redis_set_cache)
+ .to receive(:include?).with(:letters, 'a').and_call_original
+ expect(fake_repository.redis_set_cache)
+ .to receive(:include?).with(:letters, 'd').and_call_original
+
+ expect(fake_repository.letters_include?('a')).to eq(true)
+ expect(fake_repository.letters_include?('d')).to eq(false)
+ end
+
+ it 'memoizes the result' do
+ expect(fake_repository.redis_set_cache)
+ .to receive(:include?).once.and_call_original
+
+ expect(fake_repository.letters_include?('a')).to eq(true)
+ expect(fake_repository.letters_include?('a')).to eq(true)
+
+ expect(fake_repository.redis_set_cache)
+ .to receive(:include?).once.and_call_original
+
+ expect(fake_repository.letters_include?('d')).to eq(false)
+ expect(fake_repository.letters_include?('d')).to eq(false)
+ end
+ end
+ end
+ end
+ end
+
describe '#cache_method_output', :use_clean_rails_memory_store_caching do
let(:fallback) { 10 }
@@ -212,8 +295,7 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
expect(cache).to receive(:expire).with(:rendered_readme)
expect(cache).to receive(:expire).with(:branch_names)
expect(redis_set_cache).to receive(:expire).with(:rendered_readme, :branch_names)
- expect(redis_hash_cache).to receive(:delete).with(:rendered_readme)
- expect(redis_hash_cache).to receive(:delete).with(:branch_names)
+ expect(redis_hash_cache).to receive(:delete).with(:rendered_readme, :branch_names)
repository.expire_method_caches(%i(rendered_readme branch_names))
end
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
index ea856c14a77..9b4ca3f9dca 100644
--- a/spec/lib/gitlab/repository_hash_cache_spec.rb
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -48,6 +48,24 @@ RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
context "key doesn't exist" do
it { is_expected.to eq(0) }
end
+
+ context "multiple keys" do
+ before do
+ cache.write(:test1, test_hash)
+ cache.write(:test2, test_hash)
+ end
+
+ it "deletes multiple keys" do
+ cache.delete(:test1, :test2)
+
+ expect(cache.read_members(:test1, ["test"])).to eq("test" => nil)
+ expect(cache.read_members(:test2, ["test"])).to eq("test" => nil)
+ end
+
+ it "returns deleted key count" do
+ expect(cache.delete(:test1, :test2)).to eq(2)
+ end
+ end
end
describe "#key?" do
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 024aae49b04..07f4d7c462d 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -93,23 +93,6 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
it { is_expected.to eq(0) }
end
-
- context "unlink isn't supported" do
- before do
- allow_any_instance_of(Redis).to receive(:unlink) { raise ::Redis::CommandError }
- end
-
- it 'still deletes the given key' do
- expect(cache.expire(:foo)).to eq(1)
- expect(cache.read(:foo)).to be_empty
- end
-
- it 'logs the failure' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception)
-
- cache.expire(:foo)
- end
- end
end
describe '#exist?' do
diff --git a/spec/lib/gitlab/search/query_spec.rb b/spec/lib/gitlab/search/query_spec.rb
index e9601002922..dd2f23a7e47 100644
--- a/spec/lib/gitlab/search/query_spec.rb
+++ b/spec/lib/gitlab/search/query_spec.rb
@@ -38,4 +38,12 @@ RSpec.describe Gitlab::Search::Query do
expect(subject.term).to eq(query)
end
end
+
+ context 'with an exclusive filter' do
+ let(:query) { 'something -name:bingo -other:dingo' }
+
+ it 'negates the filter' do
+ expect(subject.filters).to all(include(negated: true))
+ end
+ end
end
diff --git a/spec/lib/gitlab/service_desk_email_spec.rb b/spec/lib/gitlab/service_desk_email_spec.rb
index 23e2b2ff3cf..67a1f07eec6 100644
--- a/spec/lib/gitlab/service_desk_email_spec.rb
+++ b/spec/lib/gitlab/service_desk_email_spec.rb
@@ -56,4 +56,26 @@ RSpec.describe Gitlab::ServiceDeskEmail do
end
end
end
+
+ describe '.address_for_key' do
+ context 'when service desk address is set' do
+ before do
+ stub_service_desk_email_setting(address: 'address+%{key}@example.com')
+ end
+
+ it 'returns address' do
+ expect(described_class.address_for_key('foo')).to eq('address+foo@example.com')
+ end
+ end
+
+ context 'when service desk address is not set' do
+ before do
+ stub_service_desk_email_setting(address: nil)
+ end
+
+ it 'returns nil' do
+ expect(described_class.address_for_key('foo')).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_cluster_spec.rb b/spec/lib/gitlab/sidekiq_cluster_spec.rb
index d625a2a2185..5dd913aebb0 100644
--- a/spec/lib/gitlab/sidekiq_cluster_spec.rb
+++ b/spec/lib/gitlab/sidekiq_cluster_spec.rb
@@ -91,6 +91,7 @@ RSpec.describe Gitlab::SidekiqCluster do
let(:options) do
{ env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 0, worker_id: first_worker_id, timeout: 10, dryrun: false }
end
+
let(:env) { { "ENABLE_SIDEKIQ_CLUSTER" => "1", "SIDEKIQ_WORKER_ID" => first_worker_id.to_s } }
let(:args) { ['bundle', 'exec', 'sidekiq', anything, '-eproduction', '-t10', *([anything] * 5)] }
diff --git a/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb b/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb
index 5a68b88c02d..94dcf6f9b9a 100644
--- a/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/exception_handler_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Gitlab::SidekiqLogging::ExceptionHandler do
error_class: 'RuntimeError',
error_message: exception_message,
context: 'Test',
- error_backtrace: Gitlab::BacktraceCleaner.clean_backtrace(backtrace)
+ error_backtrace: Rails.backtrace_cleaner.clean(backtrace)
)
expect(logger).to receive(:warn).with(expected_data)
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 03ace9a01c7..ad106837c47 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'scheduling_latency_s' => scheduling_latency_s
)
end
+
let(:end_payload) do
start_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
@@ -51,6 +52,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
'db_duration_s' => 0.0
)
end
+
let(:exception_payload) do
end_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 1b3b108d9ea..44bfaf4cc3c 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -128,6 +128,13 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
subject.call(worker, job, :test) { nil }
end
+ it 'sets the thread name if it was nil' do
+ allow(Thread.current).to receive(:name).and_return(nil)
+ expect(Thread.current).to receive(:name=).with(Gitlab::Metrics::Samplers::ThreadsSampler::SIDEKIQ_WORKER_THREAD_NAME)
+
+ subject.call(worker, job, :test) { nil }
+ end
+
context 'when job_duration is not available' do
let(:queue_duration_for_job) { nil }
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 018821e6c5e..4ee9569a0cf 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
Gitlab::SidekiqMiddleware::BatchLoader,
Labkit::Middleware::Sidekiq::Server,
Gitlab::SidekiqMiddleware::InstrumentationLogger,
+ Gitlab::SidekiqVersioning::Middleware,
Gitlab::SidekiqStatus::ServerMiddleware,
Gitlab::SidekiqMiddleware::ServerMetrics,
Gitlab::SidekiqMiddleware::ArgumentsLogger,
@@ -62,6 +63,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
Gitlab::SidekiqMiddleware::DuplicateJobs::Server
]
end
+
let(:enabled_sidekiq_middlewares) { all_sidekiq_middlewares - disabled_sidekiq_middlewares }
shared_examples "a server middleware chain" do
@@ -78,6 +80,41 @@ RSpec.describe Gitlab::SidekiqMiddleware do
end
end
+ shared_examples "a server middleware chain for mailer" do
+ let(:worker_class) { ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper }
+ let(:job_args) do
+ [
+ {
+ "job_class" => "ActionMailer::MailDeliveryJob",
+ "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e",
+ "provider_job_id" => nil,
+ "queue_name" => "mailers",
+ "priority" => nil,
+ "arguments" => [
+ "Notify",
+ "test_email",
+ "deliver_now",
+ {
+ "args" => [
+ "test@example.com",
+ "subject",
+ "body"
+ ],
+ "_aj_symbol_keys" => ["args"]
+ }
+ ],
+ "executions" => 0,
+ "exception_executions" => {},
+ "locale" => "en",
+ "timezone" => "UTC",
+ "enqueued_at" => "2020-07-27T07:43:31Z"
+ }
+ ]
+ end
+
+ it_behaves_like "a server middleware chain"
+ end
+
context "all optional middlewares off" do
let(:metrics) { false }
let(:arguments_logger) { false }
@@ -91,6 +128,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
end
it_behaves_like "a server middleware chain"
+ it_behaves_like "a server middleware chain for mailer"
end
context "all optional middlewares on" do
@@ -100,6 +138,7 @@ RSpec.describe Gitlab::SidekiqMiddleware do
let(:disabled_sidekiq_middlewares) { [] }
it_behaves_like "a server middleware chain"
+ it_behaves_like "a server middleware chain for mailer"
context "server metrics" do
let(:gitaly_histogram) { double(:gitaly_histogram) }
diff --git a/spec/lib/gitlab/sidekiq_versioning/middleware_spec.rb b/spec/lib/gitlab/sidekiq_versioning/middleware_spec.rb
new file mode 100644
index 00000000000..b372f16de5e
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_versioning/middleware_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqVersioning::Middleware do
+ let(:worker_class) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ include ApplicationWorker
+
+ version 2
+ end
+ end
+
+ describe '#call' do
+ let(:worker) { worker_class.new }
+ let(:job) { { 'version' => 3, 'queue' => queue } }
+ let(:queue) { worker_class.queue }
+
+ def call!(&block)
+ block ||= -> {}
+ subject.call(worker, job, queue, &block)
+ end
+
+ it 'sets worker.job_version' do
+ call!
+
+ expect(worker.job_version).to eq(job['version'])
+ end
+
+ it 'yields' do
+ expect { |b| call!(&b) }.to yield_control
+ end
+
+ context 'when worker is not ApplicationWorker' do
+ let(:worker_class) do
+ ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper
+ end
+
+ it 'does not err' do
+ expect { call! }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_versioning/worker_spec.rb b/spec/lib/gitlab/sidekiq_versioning/worker_spec.rb
new file mode 100644
index 00000000000..0781c5100fd
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_versioning/worker_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqVersioning::Worker do
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'DummyWorker'
+ end
+
+ # ApplicationWorker includes Gitlab::SidekiqVersioning::Worker
+ include ApplicationWorker
+
+ version 2
+ end
+ end
+
+ describe '.version' do
+ context 'when called with an argument' do
+ it 'sets the version option' do
+ worker.version 3
+
+ expect(worker.get_sidekiq_options['version']).to eq(3)
+ end
+ end
+
+ context 'when called without an argument' do
+ it 'returns the version option' do
+ worker.sidekiq_options version: 3
+
+ expect(worker.version).to eq(3)
+ end
+ end
+ end
+
+ describe '#job_version' do
+ let(:job) { worker.new }
+
+ context 'when job_version is not set' do
+ it 'returns latest version' do
+ expect(job.job_version).to eq(2)
+ end
+ end
+
+ context 'when job_version is set' do
+ it 'returns the set version' do
+ job.job_version = 0
+
+ expect(job.job_version).to eq(0)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/static_site_editor/config_spec.rb b/spec/lib/gitlab/static_site_editor/config_spec.rb
index b60a6a9b006..56cdb573785 100644
--- a/spec/lib/gitlab/static_site_editor/config_spec.rb
+++ b/spec/lib/gitlab/static_site_editor/config_spec.rb
@@ -46,8 +46,6 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
end
context 'when file has .md.erb extension' do
- let(:file_path) { 'README.md.erb' }
-
before do
repository.create_file(
project.creator,
@@ -58,7 +56,25 @@ RSpec.describe Gitlab::StaticSiteEditor::Config do
)
end
- it { is_expected.to include(is_supported_content: 'true') }
+ context 'when feature flag is enabled' do
+ let(:file_path) { 'FEATURE_ON.md.erb' }
+
+ before do
+ stub_feature_flags(sse_erb_support: project)
+ end
+
+ it { is_expected.to include(is_supported_content: 'true') }
+ end
+
+ context 'when feature flag is disabled' do
+ let(:file_path) { 'FEATURE_OFF.md.erb' }
+
+ before do
+ stub_feature_flags(sse_erb_support: false)
+ end
+
+ it { is_expected.to include(is_supported_content: 'false') }
+ end
end
context 'when file path is nested' do
diff --git a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb
index 3004de6fe08..55444114d39 100644
--- a/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb
+++ b/spec/lib/gitlab/template/gitlab_ci_yml_template_spec.rb
@@ -6,10 +6,6 @@ RSpec.describe Gitlab::Template::GitlabCiYmlTemplate do
subject { described_class }
describe '.all' do
- it 'strips the gitlab-ci suffix' do
- expect(subject.all.first.name).not_to end_with('.gitlab-ci.yml')
- end
-
it 'combines the globals and rest' do
all = subject.all.map(&:name)
@@ -17,34 +13,6 @@ RSpec.describe Gitlab::Template::GitlabCiYmlTemplate do
expect(all).to include('Docker')
expect(all).to include('Ruby')
end
-
- it 'ensure that the template name is used exactly once' do
- all = subject.all.group_by(&:name)
- duplicates = all.select { |_, templates| templates.length > 1 }
-
- expect(duplicates).to be_empty
- end
- end
-
- describe '.find' do
- it 'returns nil if the file does not exist' do
- expect(subject.find('mepmep-yadida')).to be nil
- end
-
- it 'returns the GitlabCiYml object of a valid file' do
- ruby = subject.find('Ruby')
-
- expect(ruby).to be_a described_class
- expect(ruby.name).to eq('Ruby')
- end
- end
-
- describe '.by_category' do
- it 'returns sorted results' do
- result = described_class.by_category('General')
-
- expect(result).to eq(result.sort)
- end
end
describe '#content' do
@@ -56,13 +24,5 @@ RSpec.describe Gitlab::Template::GitlabCiYmlTemplate do
end
end
- describe '#<=>' do
- it 'sorts lexicographically' do
- one = described_class.new('a.gitlab-ci.yml')
- other = described_class.new('z.gitlab-ci.yml')
-
- expect(one.<=>(other)).to be(-1)
- expect([other, one].sort).to eq([one, other])
- end
- end
+ it_behaves_like 'file template shared examples', 'Ruby', '.gitlab-ci.yml'
end
diff --git a/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb b/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb
new file mode 100644
index 00000000000..4c2b3dea600
--- /dev/null
+++ b/spec/lib/gitlab/template/metrics_dashboard_template_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Template::MetricsDashboardTemplate do
+ subject { described_class }
+
+ describe '.all' do
+ it 'combines the globals and rest' do
+ all = subject.all.map(&:name)
+
+ expect(all).to include('Default')
+ end
+ end
+
+ describe '#content' do
+ it 'loads the full file' do
+ example_dashboard = subject.new(Rails.root.join('lib/gitlab/metrics/templates/Default.metrics-dashboard.yml'))
+
+ expect(example_dashboard.name).to eq 'Default'
+ expect(example_dashboard.content).to start_with('#')
+ end
+ end
+
+ it_behaves_like 'file template shared examples', 'Default', '.metrics-dashboard.yml'
+end
diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb
index 4bd08fab60a..303a4a80581 100644
--- a/spec/lib/gitlab/tree_summary_spec.rb
+++ b/spec/lib/gitlab/tree_summary_spec.rb
@@ -191,7 +191,7 @@ RSpec.describe Gitlab::TreeSummary do
with_them do
before do
- create_file('dummy', path: 'other') if num_entries.zero?
+ create_file('dummy', path: 'other') if num_entries == 0
1.upto(num_entries) { |n| create_file(n, path: path) }
end
@@ -218,7 +218,7 @@ RSpec.describe Gitlab::TreeSummary do
with_them do
before do
- create_file('dummy', path: 'other') if num_entries.zero?
+ create_file('dummy', path: 'other') if num_entries == 0
1.upto(num_entries) { |n| create_file(n, path: path) }
end
diff --git a/spec/lib/gitlab/url_blocker_spec.rb b/spec/lib/gitlab/url_blocker_spec.rb
index 7edfde09864..b49efd6a092 100644
--- a/spec/lib/gitlab/url_blocker_spec.rb
+++ b/spec/lib/gitlab/url_blocker_spec.rb
@@ -280,6 +280,7 @@ RSpec.describe Gitlab::UrlBlocker, :stub_invalid_dns_only do
'[fc00:bf8b:e62c:abcd:abcd:aaaa:aaaa:aaaa]'
]
end
+
let(:fake_domain) { 'www.fakedomain.fake' }
shared_examples 'allows local requests' do |url_blocker_attributes|
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index a16ff252bc1..b58b5a84662 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::UrlBuilder do
:issue | ->(issue) { "/#{issue.project.full_path}/-/issues/#{issue.iid}" }
:merge_request | ->(merge_request) { "/#{merge_request.project.full_path}/-/merge_requests/#{merge_request.iid}" }
:project_milestone | ->(milestone) { "/#{milestone.project.full_path}/-/milestones/#{milestone.iid}" }
- :project_snippet | ->(snippet) { "/#{snippet.project.full_path}/snippets/#{snippet.id}" }
+ :project_snippet | ->(snippet) { "/#{snippet.project.full_path}/-/snippets/#{snippet.id}" }
:project_wiki | ->(wiki) { "/#{wiki.container.full_path}/-/wikis/home" }
:ci_build | ->(build) { "/#{build.project.full_path}/-/jobs/#{build.id}" }
:design | ->(design) { "/#{design.project.full_path}/-/design_management/designs/#{design.id}/raw_image" }
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::UrlBuilder do
:group_milestone | ->(milestone) { "/groups/#{milestone.group.full_path}/-/milestones/#{milestone.iid}" }
:user | ->(user) { "/#{user.full_path}" }
- :personal_snippet | ->(snippet) { "/snippets/#{snippet.id}" }
+ :personal_snippet | ->(snippet) { "/-/snippets/#{snippet.id}" }
:wiki_page | ->(wiki_page) { "#{wiki_page.wiki.wiki_base_path}/#{wiki_page.slug}" }
:note_on_commit | ->(note) { "/#{note.project.full_path}/-/commit/#{note.commit_id}#note_#{note.id}" }
@@ -47,10 +47,10 @@ RSpec.describe Gitlab::UrlBuilder do
:discussion_note_on_merge_request | ->(note) { "/#{note.project.full_path}/-/merge_requests/#{note.noteable.iid}#note_#{note.id}" }
:legacy_diff_note_on_merge_request | ->(note) { "/#{note.project.full_path}/-/merge_requests/#{note.noteable.iid}#note_#{note.id}" }
- :note_on_project_snippet | ->(note) { "/#{note.project.full_path}/snippets/#{note.noteable_id}#note_#{note.id}" }
- :discussion_note_on_project_snippet | ->(note) { "/#{note.project.full_path}/snippets/#{note.noteable_id}#note_#{note.id}" }
- :discussion_note_on_personal_snippet | ->(note) { "/snippets/#{note.noteable_id}#note_#{note.id}" }
- :note_on_personal_snippet | ->(note) { "/snippets/#{note.noteable_id}#note_#{note.id}" }
+ :note_on_project_snippet | ->(note) { "/#{note.project.full_path}/-/snippets/#{note.noteable_id}#note_#{note.id}" }
+ :discussion_note_on_project_snippet | ->(note) { "/#{note.project.full_path}/-/snippets/#{note.noteable_id}#note_#{note.id}" }
+ :discussion_note_on_personal_snippet | ->(note) { "/-/snippets/#{note.noteable_id}#note_#{note.id}" }
+ :note_on_personal_snippet | ->(note) { "/-/snippets/#{note.noteable_id}#note_#{note.id}" }
end
with_them do
@@ -98,7 +98,7 @@ RSpec.describe Gitlab::UrlBuilder do
it 'returns a raw snippet URL if requested' do
url = subject.build(snippet, raw: true)
- expect(url).to eq "#{Gitlab.config.gitlab.url}/snippets/#{snippet.id}/raw"
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/-/snippets/#{snippet.id}/raw"
end
it 'returns a raw snippet blob URL if requested' do
@@ -114,7 +114,7 @@ RSpec.describe Gitlab::UrlBuilder do
it 'returns a raw snippet URL if requested' do
url = subject.build(snippet, raw: true)
- expect(url).to eq "#{Gitlab.config.gitlab.url}/#{snippet.project.full_path}/snippets/#{snippet.id}/raw"
+ expect(url).to eq "#{Gitlab.config.gitlab.url}/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw"
end
it 'returns a raw snippet blob URL if requested' do
diff --git a/spec/lib/gitlab/usage_data/topology_spec.rb b/spec/lib/gitlab/usage_data/topology_spec.rb
index 2a7adea261d..7f4a25297e6 100644
--- a/spec/lib/gitlab/usage_data/topology_spec.rb
+++ b/spec/lib/gitlab/usage_data/topology_spec.rb
@@ -24,7 +24,9 @@ RSpec.describe Gitlab::UsageData::Topology do
expect_prometheus_api_to(
receive_app_request_volume_query,
receive_node_memory_query,
+ receive_node_memory_utilization_query,
receive_node_cpu_count_query,
+ receive_node_cpu_utilization_query,
receive_node_uname_info_query,
receive_node_service_memory_rss_query,
receive_node_service_memory_uss_query,
@@ -40,7 +42,9 @@ RSpec.describe Gitlab::UsageData::Topology do
nodes: [
{
node_memory_total_bytes: 512,
+ node_memory_utilization: 0.45,
node_cpus: 8,
+ node_cpu_utilization: 0.1,
node_uname_info: {
machine: 'x86_64',
sysname: 'Linux',
@@ -64,7 +68,9 @@ RSpec.describe Gitlab::UsageData::Topology do
},
{
node_memory_total_bytes: 1024,
+ node_memory_utilization: 0.25,
node_cpus: 16,
+ node_cpu_utilization: 0.2,
node_uname_info: {
machine: 'x86_64',
sysname: 'Linux',
@@ -102,7 +108,9 @@ RSpec.describe Gitlab::UsageData::Topology do
expect_prometheus_api_to(
receive_app_request_volume_query(result: []),
receive_node_memory_query(result: []),
+ receive_node_memory_utilization_query(result: []),
receive_node_cpu_count_query,
+ receive_node_cpu_utilization_query,
receive_node_uname_info_query,
receive_node_service_memory_rss_query(result: []),
receive_node_service_memory_uss_query(result: []),
@@ -116,6 +124,7 @@ RSpec.describe Gitlab::UsageData::Topology do
failures: [
{ 'app_requests' => 'empty_result' },
{ 'node_memory' => 'empty_result' },
+ { 'node_memory_utilization' => 'empty_result' },
{ 'service_rss' => 'empty_result' },
{ 'service_uss' => 'empty_result' },
{ 'service_workers' => 'empty_result' }
@@ -123,6 +132,7 @@ RSpec.describe Gitlab::UsageData::Topology do
nodes: [
{
node_cpus: 16,
+ node_cpu_utilization: 0.2,
node_uname_info: {
machine: 'x86_64',
release: '4.15.0-101-generic',
@@ -146,6 +156,7 @@ RSpec.describe Gitlab::UsageData::Topology do
},
{
node_cpus: 8,
+ node_cpu_utilization: 0.1,
node_uname_info: {
machine: 'x86_64',
release: '4.19.76-linuxkit',
@@ -177,6 +188,16 @@ RSpec.describe Gitlab::UsageData::Topology do
}
]
end
+
+ let(:node_memory_utilization_response) do
+ [
+ {
+ 'metric' => { 'instance' => 'localhost:9100' },
+ 'value' => [1000, '0.35']
+ }
+ ]
+ end
+
let(:node_uname_info_response) do
[
{
@@ -195,6 +216,7 @@ RSpec.describe Gitlab::UsageData::Topology do
]
end
+
 # The services in this response should all be mapped to localhost i.e. the same node
let(:service_memory_response) do
[
{
@@ -224,7 +246,9 @@ RSpec.describe Gitlab::UsageData::Topology do
expect_prometheus_api_to(
receive_app_request_volume_query(result: []),
receive_node_memory_query(result: node_memory_response),
+ receive_node_memory_utilization_query(result: node_memory_utilization_response),
receive_node_cpu_count_query(result: []),
+ receive_node_cpu_utilization_query(result: []),
receive_node_uname_info_query(result: node_uname_info_response),
receive_node_service_memory_rss_query(result: service_memory_response),
receive_node_service_memory_uss_query(result: []),
@@ -238,6 +262,7 @@ RSpec.describe Gitlab::UsageData::Topology do
failures: [
{ 'app_requests' => 'empty_result' },
{ 'node_cpus' => 'empty_result' },
+ { 'node_cpu_utilization' => 'empty_result' },
{ 'service_uss' => 'empty_result' },
{ 'service_pss' => 'empty_result' },
{ 'service_process_count' => 'empty_result' },
@@ -246,6 +271,7 @@ RSpec.describe Gitlab::UsageData::Topology do
nodes: [
{
node_memory_total_bytes: 512,
+ node_memory_utilization: 0.35,
node_uname_info: {
machine: 'x86_64',
sysname: 'Linux',
@@ -284,7 +310,9 @@ RSpec.describe Gitlab::UsageData::Topology do
expect_prometheus_api_to(
receive_app_request_volume_query(result: []),
receive_node_memory_query(result: []),
+ receive_node_memory_utilization_query(result: []),
receive_node_cpu_count_query(result: []),
+ receive_node_cpu_utilization_query(result: []),
receive_node_uname_info_query(result: []),
receive_node_service_memory_rss_query,
receive_node_service_memory_uss_query(result: []),
@@ -298,7 +326,9 @@ RSpec.describe Gitlab::UsageData::Topology do
failures: [
{ 'app_requests' => 'empty_result' },
{ 'node_memory' => 'empty_result' },
+ { 'node_memory_utilization' => 'empty_result' },
{ 'node_cpus' => 'empty_result' },
+ { 'node_cpu_utilization' => 'empty_result' },
{ 'node_uname_info' => 'empty_result' },
{ 'service_uss' => 'empty_result' },
{ 'service_pss' => 'empty_result' },
@@ -335,27 +365,98 @@ RSpec.describe Gitlab::UsageData::Topology do
end
end
+ context 'and unknown services are encountered' do
+ let(:unknown_service_process_count_response) do
+ [
+ {
+ 'metric' => { 'instance' => 'instance2:9000', 'job' => 'unknown-service-A' },
+ 'value' => [1000, '42']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:9001', 'job' => 'unknown-service-B' },
+ 'value' => [1000, '42']
+ }
+ ]
+ end
+
+ it 'filters out unknown service data and reports the unknown services as a failure' do
+ expect_prometheus_api_to(
+ receive_app_request_volume_query(result: []),
+ receive_node_memory_query(result: []),
+ receive_node_memory_utilization_query(result: []),
+ receive_node_cpu_count_query(result: []),
+ receive_node_cpu_utilization_query(result: []),
+ receive_node_uname_info_query(result: []),
+ receive_node_service_memory_rss_query(result: []),
+ receive_node_service_memory_uss_query(result: []),
+ receive_node_service_memory_pss_query(result: []),
+ receive_node_service_process_count_query(result: unknown_service_process_count_response),
+ receive_node_service_app_server_workers_query(result: [])
+ )
+
+ expect(subject.dig(:topology, :failures)).to include(
+ { 'service_unknown' => 'unknown-service-A' },
+ { 'service_unknown' => 'unknown-service-B' }
+ )
+ end
+ end
+
context 'and an error is raised when querying Prometheus' do
- it 'returns empty result with failures' do
- expect_prometheus_api_to receive(:query)
- .at_least(:once)
- .and_raise(Gitlab::PrometheusClient::ConnectionError)
+ context 'without timeout failures' do
+ it 'returns an empty result and executes subsequent queries as usual' do
+ expect_prometheus_api_to receive(:query)
+ .at_least(:once)
+ .and_raise(Gitlab::PrometheusClient::ConnectionError)
- expect(subject[:topology]).to eq({
- duration_s: 0,
- failures: [
- { 'app_requests' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_memory' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_cpus' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'node_uname_info' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_rss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_uss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_pss' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_process_count' => 'Gitlab::PrometheusClient::ConnectionError' },
- { 'service_workers' => 'Gitlab::PrometheusClient::ConnectionError' }
- ],
- nodes: []
- })
+ expect(subject[:topology]).to eq({
+ duration_s: 0,
+ failures: [
+ { 'app_requests' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'node_memory' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'node_memory_utilization' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'node_cpus' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'node_cpu_utilization' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'node_uname_info' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'service_rss' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'service_uss' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'service_pss' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'service_process_count' => 'Gitlab::PrometheusClient::ConnectionError' },
+ { 'service_workers' => 'Gitlab::PrometheusClient::ConnectionError' }
+ ],
+ nodes: []
+ })
+ end
+ end
+
+ context 'with timeout failures' do
+ where(:exception) do
+ described_class::TIMEOUT_ERRORS
+ end
+
+ with_them do
+ it 'returns an empty result and cancels subsequent queries' do
+ expect_prometheus_api_to receive(:query)
+ .and_raise(exception)
+
+ expect(subject[:topology]).to eq({
+ duration_s: 0,
+ failures: [
+ { 'app_requests' => exception.to_s },
+ { 'node_memory' => 'timeout_cancellation' },
+ { 'node_memory_utilization' => 'timeout_cancellation' },
+ { 'node_cpus' => 'timeout_cancellation' },
+ { 'node_cpu_utilization' => 'timeout_cancellation' },
+ { 'node_uname_info' => 'timeout_cancellation' },
+ { 'service_rss' => 'timeout_cancellation' },
+ { 'service_uss' => 'timeout_cancellation' },
+ { 'service_pss' => 'timeout_cancellation' },
+ { 'service_process_count' => 'timeout_cancellation' },
+ { 'service_workers' => 'timeout_cancellation' }
+ ],
+ nodes: []
+ })
+ end
+ end
end
end
end
@@ -411,6 +512,21 @@ RSpec.describe Gitlab::UsageData::Topology do
])
end
+ def receive_node_memory_utilization_query(result: nil)
+ receive(:query)
+ .with(/node_memory_utilization/, an_instance_of(Hash))
+ .and_return(result || [
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '0.45']
+ },
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '0.25']
+ }
+ ])
+ end
+
def receive_node_cpu_count_query(result: nil)
receive(:query)
.with(/node_cpus/, an_instance_of(Hash))
@@ -426,6 +542,21 @@ RSpec.describe Gitlab::UsageData::Topology do
])
end
+ def receive_node_cpu_utilization_query(result: nil)
+ receive(:query)
+ .with(/node_cpu_utilization/, an_instance_of(Hash))
+ .and_return(result || [
+ {
+ 'metric' => { 'instance' => 'instance2:8090' },
+ 'value' => [1000, '0.2']
+ },
+ {
+ 'metric' => { 'instance' => 'instance1:8080' },
+ 'value' => [1000, '0.1']
+ }
+ ])
+ end
+
def receive_node_uname_info_query(result: nil)
receive(:query)
.with('node_uname_info')
@@ -534,11 +665,6 @@ RSpec.describe Gitlab::UsageData::Topology do
{
'metric' => { 'instance' => 'instance2:8080', 'job' => 'registry' },
'value' => [1000, '1']
- },
- # unknown service => should be stripped out
- {
- 'metric' => { 'instance' => 'instance2:9000', 'job' => 'not-a-gitlab-service' },
- 'value' => [1000, '42']
}
])
end
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
new file mode 100644
index 00000000000..2ab349a67d9
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_shared_state do
+ let(:entity1) { 'dfb9d2d2-f56c-4c77-8aeb-6cddc4a1f857' }
+ let(:entity2) { '1dd9afb2-a3ee-4de1-8ae3-a405579c8584' }
+ let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
+ let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }
+
+ let(:weekly_event) { 'g_analytics_contribution' }
+ let(:daily_event) { 'g_search' }
+ let(:different_aggregation) { 'different_aggregation' }
+
+ let(:known_events) do
+ [
+ { name: "g_analytics_contribution", redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "weekly" },
+ { name: "g_analytics_valuestream", redis_slot: "analytics", category: "analytics", expiry: 84, aggregation: "daily" },
+ { name: "g_analytics_productivity", redis_slot: "analytics", category: "productivity", expiry: 84, aggregation: "weekly" },
+ { name: "g_compliance_dashboard", redis_slot: "compliance", category: "compliance", aggregation: "weekly" },
+ { name: "g_search", category: "global", aggregation: "daily" },
+ { name: "different_aggregation", category: "global", aggregation: "monthly" }
+ ].map(&:with_indifferent_access)
+ end
+
+ before do
+ allow(described_class).to receive(:known_events).and_return(known_events)
+ end
+
+ around do |example|
+ # We need to freeze to a reference time
+ # because visits are grouped by the week number in the year
+ # Without freezing the time, the test may behave inconsistently
+ # depending on which day of the week the test is run.
+ # Monday 1st of June 2020
+ reference_time = Time.utc(2020, 6, 1)
+ Timecop.freeze(reference_time) { example.run }
+ end
+
+ describe '.track_event' do
+ it "raise error if metrics don't have same aggregation" do
+ expect { described_class.track_event(entity1, different_aggregation, Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
+ end
+
+ it 'raise error if metrics of unknown aggregation' do
+ expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
+ end
+ end
+
+ describe '.unique_events' do
+ before do
+ # Events in the current week should not be counted because the week is not complete
+ described_class.track_event(entity1, weekly_event, Date.current)
+ described_class.track_event(entity2, weekly_event, Date.current)
+
+ # Events last week
+ described_class.track_event(entity1, weekly_event, 2.days.ago)
+ described_class.track_event(entity1, weekly_event, 2.days.ago)
+
+ # Events 2 weeks ago
+ described_class.track_event(entity1, weekly_event, 2.weeks.ago)
+
+ # Events 4 weeks ago
+ described_class.track_event(entity3, weekly_event, 4.weeks.ago)
+ described_class.track_event(entity4, weekly_event, 29.days.ago)
+
+ # Events in the current day should be counted in the daily aggregation
+ described_class.track_event(entity1, daily_event, Date.current)
+ described_class.track_event(entity2, daily_event, Date.current)
+
+ # Events last week
+ described_class.track_event(entity1, daily_event, 2.days.ago)
+ described_class.track_event(entity1, daily_event, 2.days.ago)
+
+ # Events 2 weeks ago
+ described_class.track_event(entity1, daily_event, 14.days.ago)
+
+ # Events 4 weeks ago
+ described_class.track_event(entity3, daily_event, 28.days.ago)
+ described_class.track_event(entity4, daily_event, 29.days.ago)
+ end
+
+ it 'raises an error if metrics are not in the same slot' do
+ expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_compliance_dashboard), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
+ end
+
+ it 'raises an error if metrics are not in the same category' do
+ expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_analytics_productivity), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
+ end
+
+ it "raise error if metrics don't have same aggregation" do
+ expect { described_class.unique_events(event_names: %w(g_analytics_contribution g_analytics_valuestream), start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
+ end
+
+ context 'when data for the last complete week' do
+ it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
+ end
+
+ context 'when data for the last 4 complete weeks' do
+ it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
+ end
+
+ context 'when data for the week 4 weeks ago' do
+ it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
+ end
+
+ context 'when using daily aggregation' do
+ it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
+ it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
+ it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb b/spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb
index 584d8407e79..bd348666729 100644
--- a/spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/track_unique_actions_spec.rb
@@ -7,20 +7,19 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redi
let(:time) { Time.zone.now }
- def track_action(params)
- track_unique_events.track_action(params)
+ def track_event(params)
+ track_unique_events.track_event(params)
end
- def count_unique_events(params)
- track_unique_events.count_unique_events(params)
+ def count_unique(params)
+ track_unique_events.count_unique(params)
end
context 'tracking an event' do
context 'when tracking successfully' do
- context 'when the feature flag and the application setting is enabled' do
+ context 'when the application setting is enabled' do
context 'when the target and the action is valid' do
before do
- stub_feature_flags(described_class::FEATURE_FLAG => true)
stub_application_setting(usage_ping_enabled: true)
end
@@ -29,28 +28,28 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redi
design = Event::TARGET_TYPES[:design]
wiki = Event::TARGET_TYPES[:wiki]
- expect(track_action(event_action: :pushed, event_target: project, author_id: 1)).to be_truthy
- expect(track_action(event_action: :pushed, event_target: project, author_id: 1)).to be_truthy
- expect(track_action(event_action: :pushed, event_target: project, author_id: 2)).to be_truthy
- expect(track_action(event_action: :pushed, event_target: project, author_id: 3)).to be_truthy
- expect(track_action(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)).to be_truthy
- expect(track_action(event_action: :created, event_target: project, author_id: 5, time: time - 3.days)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: project, author_id: 1)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: project, author_id: 1)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: project, author_id: 2)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: project, author_id: 3)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)).to be_truthy
+ expect(track_event(event_action: :created, event_target: project, author_id: 5, time: time - 3.days)).to be_truthy
- expect(track_action(event_action: :destroyed, event_target: design, author_id: 3)).to be_truthy
- expect(track_action(event_action: :created, event_target: design, author_id: 4)).to be_truthy
- expect(track_action(event_action: :updated, event_target: design, author_id: 5)).to be_truthy
- expect(track_action(event_action: :pushed, event_target: design, author_id: 6)).to be_truthy
+ expect(track_event(event_action: :destroyed, event_target: design, author_id: 3)).to be_truthy
+ expect(track_event(event_action: :created, event_target: design, author_id: 4)).to be_truthy
+ expect(track_event(event_action: :updated, event_target: design, author_id: 5)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: design, author_id: 6)).to be_truthy
- expect(track_action(event_action: :destroyed, event_target: wiki, author_id: 5)).to be_truthy
- expect(track_action(event_action: :created, event_target: wiki, author_id: 3)).to be_truthy
- expect(track_action(event_action: :updated, event_target: wiki, author_id: 4)).to be_truthy
- expect(track_action(event_action: :pushed, event_target: wiki, author_id: 6)).to be_truthy
+ expect(track_event(event_action: :destroyed, event_target: wiki, author_id: 5)).to be_truthy
+ expect(track_event(event_action: :created, event_target: wiki, author_id: 3)).to be_truthy
+ expect(track_event(event_action: :updated, event_target: wiki, author_id: 4)).to be_truthy
+ expect(track_event(event_action: :pushed, event_target: wiki, author_id: 6)).to be_truthy
- expect(count_unique_events(event_action: described_class::PUSH_ACTION, date_from: time, date_to: Date.today)).to eq(3)
- expect(count_unique_events(event_action: described_class::PUSH_ACTION, date_from: time - 5.days, date_to: Date.tomorrow)).to eq(4)
- expect(count_unique_events(event_action: described_class::DESIGN_ACTION, date_from: time - 5.days, date_to: Date.today)).to eq(3)
- expect(count_unique_events(event_action: described_class::WIKI_ACTION, date_from: time - 5.days, date_to: Date.today)).to eq(3)
- expect(count_unique_events(event_action: described_class::PUSH_ACTION, date_from: time - 5.days, date_to: time - 2.days)).to eq(1)
+ expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time, date_to: Date.today)).to eq(3)
+ expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time - 5.days, date_to: Date.tomorrow)).to eq(4)
+ expect(count_unique(event_action: described_class::DESIGN_ACTION, date_from: time - 5.days, date_to: Date.today)).to eq(3)
+ expect(count_unique(event_action: described_class::WIKI_ACTION, date_from: time - 5.days, date_to: Date.today)).to eq(3)
+ expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time - 5.days, date_to: time - 2.days)).to eq(1)
end
end
end
@@ -59,22 +58,20 @@ RSpec.describe Gitlab::UsageDataCounters::TrackUniqueActions, :clean_gitlab_redi
context 'when tracking unsuccessfully' do
using RSpec::Parameterized::TableSyntax
- where(:feature_flag, :application_setting, :target, :action) do
- true | true | Project | :invalid_action
- false | true | Project | :pushed
- true | false | Project | :pushed
- true | true | :invalid_target | :pushed
+ where(:application_setting, :target, :action) do
+ true | Project | :invalid_action
+ false | Project | :pushed
+ true | :invalid_target | :pushed
end
with_them do
before do
stub_application_setting(usage_ping_enabled: application_setting)
- stub_feature_flags(described_class::FEATURE_FLAG => feature_flag)
end
it 'returns the expected values' do
- expect(track_action(event_action: action, event_target: target, author_id: 2)).to be_nil
- expect(count_unique_events(event_action: described_class::PUSH_ACTION, date_from: time, date_to: Date.today)).to eq(0)
+ expect(track_event(event_action: action, event_target: target, author_id: 2)).to be_nil
+ expect(count_unique(event_action: described_class::PUSH_ACTION, date_from: time, date_to: Date.today)).to eq(0)
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/wiki_page_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/wiki_page_counter_spec.rb
index 685f6ea0a85..0c8f89fdf07 100644
--- a/spec/lib/gitlab/usage_data_counters/wiki_page_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/wiki_page_counter_spec.rb
@@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::WikiPageCounter do
+ it_behaves_like 'a redis usage counter', 'Wiki Page', :view
it_behaves_like 'a redis usage counter', 'Wiki Page', :create
it_behaves_like 'a redis usage counter', 'Wiki Page', :update
it_behaves_like 'a redis usage counter', 'Wiki Page', :delete
it_behaves_like 'a redis usage counter with totals', :wiki_pages,
+ view: 8,
create: 5,
update: 3,
delete: 2
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index bca2f49eb33..3be8a770b2b 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -13,8 +13,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe '.uncached_data' do
describe '.usage_activity_by_stage' do
it 'includes usage_activity_by_stage data' do
- expect(described_class.uncached_data).to include(:usage_activity_by_stage)
- expect(described_class.uncached_data).to include(:usage_activity_by_stage_monthly)
+ uncached_data = described_class.uncached_data
+
+ expect(uncached_data).to include(:usage_activity_by_stage)
+ expect(uncached_data).to include(:usage_activity_by_stage_monthly)
+ expect(uncached_data[:usage_activity_by_stage])
+ .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
+ expect(uncached_data[:usage_activity_by_stage_monthly])
+ .to include(:configure, :create, :manage, :monitor, :plan, :release, :verify)
end
it 'clears memoized values' do
@@ -30,269 +36,269 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
described_class.uncached_data
end
- context 'for configure' do
- it 'includes accurate usage_activity_by_stage data' do
- for_defined_days_back do
- user = create(:user)
- cluster = create(:cluster, user: user)
- create(:clusters_applications_cert_manager, :installed, cluster: cluster)
- create(:clusters_applications_helm, :installed, cluster: cluster)
- create(:clusters_applications_ingress, :installed, cluster: cluster)
- create(:clusters_applications_knative, :installed, cluster: cluster)
- create(:cluster, :disabled, user: user)
- create(:cluster_provider_gcp, :created)
- create(:cluster_provider_aws, :created)
- create(:cluster_platform_kubernetes)
- create(:cluster, :group, :disabled, user: user)
- create(:cluster, :group, user: user)
- create(:cluster, :instance, :disabled, :production_environment)
- create(:cluster, :instance, :production_environment)
- create(:cluster, :management_project)
- end
+ it 'merge_requests_users is included only in monthly counters' do
+ uncached_data = described_class.uncached_data
- expect(described_class.uncached_data[:usage_activity_by_stage][:configure]).to include(
- clusters_applications_cert_managers: 2,
- clusters_applications_helm: 2,
- clusters_applications_ingress: 2,
- clusters_applications_knative: 2,
- clusters_management_project: 2,
- clusters_disabled: 4,
- clusters_enabled: 12,
- clusters_platforms_gke: 2,
- clusters_platforms_eks: 2,
- clusters_platforms_user: 2,
- instance_clusters_disabled: 2,
- instance_clusters_enabled: 2,
- group_clusters_disabled: 2,
- group_clusters_enabled: 2,
- project_clusters_disabled: 2,
- project_clusters_enabled: 10
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:configure]).to include(
- clusters_applications_cert_managers: 1,
- clusters_applications_helm: 1,
- clusters_applications_ingress: 1,
- clusters_applications_knative: 1,
- clusters_management_project: 1,
- clusters_disabled: 2,
- clusters_enabled: 6,
- clusters_platforms_gke: 1,
- clusters_platforms_eks: 1,
- clusters_platforms_user: 1,
- instance_clusters_disabled: 1,
- instance_clusters_enabled: 1,
- group_clusters_disabled: 1,
- group_clusters_enabled: 1,
- project_clusters_disabled: 1,
- project_clusters_enabled: 5
- )
- end
+ expect(uncached_data[:usage_activity_by_stage][:create])
+ .not_to include(:merge_requests_users)
+ expect(uncached_data[:usage_activity_by_stage_monthly][:create])
+ .to include(:merge_requests_users)
end
+ end
- context 'for create' do
- it 'include usage_activity_by_stage data' do
- expect(described_class.uncached_data[:usage_activity_by_stage][:create])
- .not_to include(
- :merge_requests_users
- )
- end
-
- it 'includes monthly usage_activity_by_stage data' do
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create])
- .to include(
- :merge_requests_users
- )
- end
-
- it 'includes accurate usage_activity_by_stage data' do
- for_defined_days_back do
- user = create(:user)
- project = create(:project, :repository_private,
- :test_repo, :remote_mirror, creator: user)
- create(:merge_request, source_project: project)
- create(:deploy_key, user: user)
- create(:key, user: user)
- create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
- create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
- create(:remote_mirror, project: project)
- create(:snippet, author: user)
- end
-
- expect(described_class.uncached_data[:usage_activity_by_stage][:create]).to include(
- deploy_keys: 2,
- keys: 2,
- merge_requests: 2,
- projects_with_disable_overriding_approvers_per_merge_request: 2,
- projects_without_disable_overriding_approvers_per_merge_request: 4,
- remote_mirrors: 2,
- snippets: 2
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:create]).to include(
- deploy_keys: 1,
- keys: 1,
- merge_requests: 1,
- projects_with_disable_overriding_approvers_per_merge_request: 1,
- projects_without_disable_overriding_approvers_per_merge_request: 2,
- remote_mirrors: 1,
- snippets: 1
- )
- end
+ it 'ensures recorded_at is set before any other usage data calculation' do
+ %i(alt_usage_data redis_usage_data distinct_count count).each do |method|
+ expect(described_class).not_to receive(method)
end
+ expect(described_class).to receive(:recorded_at).and_raise(Exception.new('Stopped calculating recorded_at'))
- context 'for manage' do
- it 'includes accurate usage_activity_by_stage data' do
- stub_config(
- omniauth:
- { providers: omniauth_providers }
- )
-
- for_defined_days_back do
- user = create(:user)
- create(:event, author: user)
- create(:group_member, user: user)
- end
+ expect { described_class.uncached_data }.to raise_error('Stopped calculating recorded_at')
+ end
+ end
- expect(described_class.uncached_data[:usage_activity_by_stage][:manage]).to include(
- events: 2,
- groups: 2,
- users_created: Gitlab.ee? ? 6 : 5,
- omniauth_providers: ['google_oauth2']
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:manage]).to include(
- events: 1,
- groups: 1,
- users_created: Gitlab.ee? ? 4 : 3,
- omniauth_providers: ['google_oauth2']
- )
- end
+ describe '.usage_activity_by_stage_configure' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+ user = create(:user)
+ cluster = create(:cluster, user: user)
+ create(:clusters_applications_cert_manager, :installed, cluster: cluster)
+ create(:clusters_applications_helm, :installed, cluster: cluster)
+ create(:clusters_applications_ingress, :installed, cluster: cluster)
+ create(:clusters_applications_knative, :installed, cluster: cluster)
+ create(:cluster, :disabled, user: user)
+ create(:cluster_provider_gcp, :created)
+ create(:cluster_provider_aws, :created)
+ create(:cluster_platform_kubernetes)
+ create(:cluster, :group, :disabled, user: user)
+ create(:cluster, :group, user: user)
+ create(:cluster, :instance, :disabled, :production_environment)
+ create(:cluster, :instance, :production_environment)
+ create(:cluster, :management_project)
+ end
+
+ expect(described_class.usage_activity_by_stage_configure({})).to include(
+ clusters_applications_cert_managers: 2,
+ clusters_applications_helm: 2,
+ clusters_applications_ingress: 2,
+ clusters_applications_knative: 2,
+ clusters_management_project: 2,
+ clusters_disabled: 4,
+ clusters_enabled: 12,
+ clusters_platforms_gke: 2,
+ clusters_platforms_eks: 2,
+ clusters_platforms_user: 2,
+ instance_clusters_disabled: 2,
+ instance_clusters_enabled: 2,
+ group_clusters_disabled: 2,
+ group_clusters_enabled: 2,
+ project_clusters_disabled: 2,
+ project_clusters_enabled: 10
+ )
+ expect(described_class.usage_activity_by_stage_configure(described_class.last_28_days_time_period)).to include(
+ clusters_applications_cert_managers: 1,
+ clusters_applications_helm: 1,
+ clusters_applications_ingress: 1,
+ clusters_applications_knative: 1,
+ clusters_management_project: 1,
+ clusters_disabled: 2,
+ clusters_enabled: 6,
+ clusters_platforms_gke: 1,
+ clusters_platforms_eks: 1,
+ clusters_platforms_user: 1,
+ instance_clusters_disabled: 1,
+ instance_clusters_enabled: 1,
+ group_clusters_disabled: 1,
+ group_clusters_enabled: 1,
+ project_clusters_disabled: 1,
+ project_clusters_enabled: 5
+ )
+ end
+ end
- def omniauth_providers
- [
- OpenStruct.new(name: 'google_oauth2'),
- OpenStruct.new(name: 'ldapmain'),
- OpenStruct.new(name: 'group_saml')
- ]
- end
- end
+ describe 'usage_activity_by_stage_create' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+ user = create(:user)
+ project = create(:project, :repository_private,
+ :test_repo, :remote_mirror, creator: user)
+ create(:merge_request, source_project: project)
+ create(:deploy_key, user: user)
+ create(:key, user: user)
+ create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
+ create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
+ create(:remote_mirror, project: project)
+ create(:snippet, author: user)
+ end
+
+ expect(described_class.usage_activity_by_stage_create({})).to include(
+ deploy_keys: 2,
+ keys: 2,
+ merge_requests: 2,
+ projects_with_disable_overriding_approvers_per_merge_request: 2,
+ projects_without_disable_overriding_approvers_per_merge_request: 4,
+ remote_mirrors: 2,
+ snippets: 2
+ )
+ expect(described_class.usage_activity_by_stage_create(described_class.last_28_days_time_period)).to include(
+ deploy_keys: 1,
+ keys: 1,
+ merge_requests: 1,
+ projects_with_disable_overriding_approvers_per_merge_request: 1,
+ projects_without_disable_overriding_approvers_per_merge_request: 2,
+ remote_mirrors: 1,
+ snippets: 1
+ )
+ end
+ end
- context 'for monitor' do
- it 'includes accurate usage_activity_by_stage data' do
- for_defined_days_back do
- user = create(:user, dashboard: 'operations')
- cluster = create(:cluster, user: user)
- create(:project, creator: user)
- create(:clusters_applications_prometheus, :installed, cluster: cluster)
- end
+ describe 'usage_activity_by_stage_manage' do
+ it 'includes accurate usage_activity_by_stage data' do
+ stub_config(
+ omniauth:
+ { providers: omniauth_providers }
+ )
- expect(described_class.uncached_data[:usage_activity_by_stage][:monitor]).to include(
- clusters: 2,
- clusters_applications_prometheus: 2,
- operations_dashboard_default_dashboard: 2
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:monitor]).to include(
- clusters: 1,
- clusters_applications_prometheus: 1,
- operations_dashboard_default_dashboard: 1
- )
- end
+ for_defined_days_back do
+ user = create(:user)
+ create(:event, author: user)
+ create(:group_member, user: user)
end
- context 'for plan' do
- it 'includes accurate usage_activity_by_stage data' do
- for_defined_days_back do
- user = create(:user)
- project = create(:project, creator: user)
- issue = create(:issue, project: project, author: user)
- create(:note, project: project, noteable: issue, author: user)
- create(:todo, project: project, target: issue, author: user)
- end
-
- expect(described_class.uncached_data[:usage_activity_by_stage][:plan]).to include(
- issues: 2,
- notes: 2,
- projects: 2,
- todos: 2
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(
- issues: 1,
- notes: 1,
- projects: 1,
- todos: 1
- )
- end
- end
+ expect(described_class.usage_activity_by_stage_manage({})).to include(
+ events: 2,
+ groups: 2,
+ users_created: 4,
+ omniauth_providers: ['google_oauth2']
+ )
+ expect(described_class.usage_activity_by_stage_manage(described_class.last_28_days_time_period)).to include(
+ events: 1,
+ groups: 1,
+ users_created: 2,
+ omniauth_providers: ['google_oauth2']
+ )
+ end
- context 'for release' do
- it 'includes accurate usage_activity_by_stage data' do
- for_defined_days_back do
- user = create(:user)
- create(:deployment, :failed, user: user)
- create(:release, author: user)
- create(:deployment, :success, user: user)
- end
+ def omniauth_providers
+ [
+ OpenStruct.new(name: 'google_oauth2'),
+ OpenStruct.new(name: 'ldapmain'),
+ OpenStruct.new(name: 'group_saml')
+ ]
+ end
+ end
- expect(described_class.uncached_data[:usage_activity_by_stage][:release]).to include(
- deployments: 2,
- failed_deployments: 2,
- releases: 2,
- successful_deployments: 2
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:release]).to include(
- deployments: 1,
- failed_deployments: 1,
- releases: 1,
- successful_deployments: 1
- )
- end
+ describe 'usage_activity_by_stage_monitor' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+ user = create(:user, dashboard: 'operations')
+ cluster = create(:cluster, user: user)
+ create(:project, creator: user)
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
end
- context 'for verify' do
- it 'includes accurate usage_activity_by_stage data' do
- for_defined_days_back do
- user = create(:user)
- create(:ci_build, user: user)
- create(:ci_empty_pipeline, source: :external, user: user)
- create(:ci_empty_pipeline, user: user)
- create(:ci_pipeline, :auto_devops_source, user: user)
- create(:ci_pipeline, :repository_source, user: user)
- create(:ci_pipeline_schedule, owner: user)
- create(:ci_trigger, owner: user)
- create(:clusters_applications_runner, :installed)
- end
+ expect(described_class.usage_activity_by_stage_monitor({})).to include(
+ clusters: 2,
+ clusters_applications_prometheus: 2,
+ operations_dashboard_default_dashboard: 2
+ )
+ expect(described_class.usage_activity_by_stage_monitor(described_class.last_28_days_time_period)).to include(
+ clusters: 1,
+ clusters_applications_prometheus: 1,
+ operations_dashboard_default_dashboard: 1
+ )
+ end
+ end
- expect(described_class.uncached_data[:usage_activity_by_stage][:verify]).to include(
- ci_builds: 2,
- ci_external_pipelines: 2,
- ci_internal_pipelines: 2,
- ci_pipeline_config_auto_devops: 2,
- ci_pipeline_config_repository: 2,
- ci_pipeline_schedules: 2,
- ci_pipelines: 2,
- ci_triggers: 2,
- clusters_applications_runner: 2
- )
- expect(described_class.uncached_data[:usage_activity_by_stage_monthly][:verify]).to include(
- ci_builds: 1,
- ci_external_pipelines: 1,
- ci_internal_pipelines: 1,
- ci_pipeline_config_auto_devops: 1,
- ci_pipeline_config_repository: 1,
- ci_pipeline_schedules: 1,
- ci_pipelines: 1,
- ci_triggers: 1,
- clusters_applications_runner: 1
- )
- end
- end
+ describe 'usage_activity_by_stage_plan' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+ user = create(:user)
+ project = create(:project, creator: user)
+ issue = create(:issue, project: project, author: user)
+ create(:issue, project: project, author: User.support_bot)
+ create(:note, project: project, noteable: issue, author: user)
+ create(:todo, project: project, target: issue, author: user)
+ end
+
+ expect(described_class.usage_activity_by_stage_plan({})).to include(
+ issues: 3,
+ notes: 2,
+ projects: 2,
+ todos: 2,
+ service_desk_enabled_projects: 2,
+ service_desk_issues: 2
+ )
+ expect(described_class.usage_activity_by_stage_plan(described_class.last_28_days_time_period)).to include(
+ issues: 2,
+ notes: 1,
+ projects: 1,
+ todos: 1,
+ service_desk_enabled_projects: 1,
+ service_desk_issues: 1
+ )
end
+ end
- it 'ensures recorded_at is set before any other usage data calculation' do
- %i(alt_usage_data redis_usage_data distinct_count count).each do |method|
- expect(described_class).not_to receive(method)
+ describe 'usage_activity_by_stage_release' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+ user = create(:user)
+ create(:deployment, :failed, user: user)
+ create(:release, author: user)
+ create(:deployment, :success, user: user)
end
- expect(described_class).to receive(:recorded_at).and_raise(Exception.new('Stopped calculating recorded_at'))
- expect { described_class.uncached_data }.to raise_error('Stopped calculating recorded_at')
+ expect(described_class.usage_activity_by_stage_release({})).to include(
+ deployments: 2,
+ failed_deployments: 2,
+ releases: 2,
+ successful_deployments: 2
+ )
+ expect(described_class.usage_activity_by_stage_release(described_class.last_28_days_time_period)).to include(
+ deployments: 1,
+ failed_deployments: 1,
+ releases: 1,
+ successful_deployments: 1
+ )
+ end
+ end
+
+ describe 'usage_activity_by_stage_verify' do
+ it 'includes accurate usage_activity_by_stage data' do
+ for_defined_days_back do
+ user = create(:user)
+ create(:ci_build, user: user)
+ create(:ci_empty_pipeline, source: :external, user: user)
+ create(:ci_empty_pipeline, user: user)
+ create(:ci_pipeline, :auto_devops_source, user: user)
+ create(:ci_pipeline, :repository_source, user: user)
+ create(:ci_pipeline_schedule, owner: user)
+ create(:ci_trigger, owner: user)
+ create(:clusters_applications_runner, :installed)
+ end
+
+ expect(described_class.usage_activity_by_stage_verify({})).to include(
+ ci_builds: 2,
+ ci_external_pipelines: 2,
+ ci_internal_pipelines: 2,
+ ci_pipeline_config_auto_devops: 2,
+ ci_pipeline_config_repository: 2,
+ ci_pipeline_schedules: 2,
+ ci_pipelines: 2,
+ ci_triggers: 2,
+ clusters_applications_runner: 2
+ )
+ expect(described_class.usage_activity_by_stage_verify(described_class.last_28_days_time_period)).to include(
+ ci_builds: 1,
+ ci_external_pipelines: 1,
+ ci_internal_pipelines: 1,
+ ci_pipeline_config_auto_devops: 1,
+ ci_pipeline_config_repository: 1,
+ ci_pipeline_schedules: 1,
+ ci_pipelines: 1,
+ ci_triggers: 1,
+ clusters_applications_runner: 1
+ )
end
end
@@ -338,13 +344,18 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:projects_slack_active]).to eq(2)
expect(count_data[:projects_slack_slash_commands_active]).to eq(1)
expect(count_data[:projects_custom_issue_tracker_active]).to eq(1)
- expect(count_data[:projects_mattermost_active]).to eq(0)
+ expect(count_data[:projects_mattermost_active]).to eq(1)
+ expect(count_data[:templates_mattermost_active]).to eq(1)
+ expect(count_data[:instances_mattermost_active]).to eq(1)
+ expect(count_data[:projects_inheriting_instance_mattermost_active]).to eq(1)
expect(count_data[:projects_with_repositories_enabled]).to eq(3)
expect(count_data[:projects_with_error_tracking_enabled]).to eq(1)
expect(count_data[:projects_with_alerts_service_enabled]).to eq(1)
expect(count_data[:projects_with_prometheus_alerts]).to eq(2)
expect(count_data[:projects_with_terraform_reports]).to eq(2)
expect(count_data[:projects_with_terraform_states]).to eq(2)
+ expect(count_data[:protected_branches]).to eq(2)
+ expect(count_data[:protected_branches_except_default]).to eq(1)
expect(count_data[:terraform_reports]).to eq(6)
expect(count_data[:terraform_states]).to eq(3)
expect(count_data[:issues_created_from_gitlab_error_tracking_ui]).to eq(1)
@@ -574,9 +585,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.components_usage_data }
it 'gathers basic components usage data' do
- stub_runtime(:puma)
+ stub_application_setting(container_registry_vendor: 'gitlab', container_registry_version: 'x.y.z')
- expect(subject[:app_server][:type]).to eq('puma')
expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled)
expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION)
expect(subject[:git][:version]).to eq(Gitlab::Git.version)
@@ -587,32 +597,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
expect(subject[:gitaly][:clusters]).to be >= 0
expect(subject[:gitaly][:filesystems]).to be_an(Array)
expect(subject[:gitaly][:filesystems].first).to be_a(String)
- end
-
- def stub_runtime(runtime)
- allow(Gitlab::Runtime).to receive(:identify).and_return(runtime)
- end
- end
-
- describe '.app_server_type' do
- subject { described_class.app_server_type }
-
- it 'successfully identifies runtime and returns the identifier' do
- expect(Gitlab::Runtime).to receive(:identify).and_return(:runtime_identifier)
-
- is_expected.to eq('runtime_identifier')
- end
-
- context 'when runtime is not identified' do
- let(:exception) { Gitlab::Runtime::IdentificationError.new('exception message from runtime identify') }
-
- it 'logs the exception and returns unknown app server type' do
- expect(Gitlab::Runtime).to receive(:identify).and_raise(exception)
-
- expect(Gitlab::AppLogger).to receive(:error).with(exception.message)
- expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception)
- expect(subject).to eq('unknown_app_server_type')
- end
+ expect(subject[:container_registry_server][:vendor]).to eq('gitlab')
+ expect(subject[:container_registry_server][:version]).to eq('x.y.z')
end
end
@@ -926,45 +912,29 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:time) { Time.zone.now }
before do
- stub_feature_flags(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG => feature_flag)
- end
-
- context 'when the feature flag is enabled' do
- let(:feature_flag) { true }
-
- before do
- counter = Gitlab::UsageDataCounters::TrackUniqueActions
- project = Event::TARGET_TYPES[:project]
- wiki = Event::TARGET_TYPES[:wiki]
- design = Event::TARGET_TYPES[:design]
-
- counter.track_action(event_action: :pushed, event_target: project, author_id: 1)
- counter.track_action(event_action: :pushed, event_target: project, author_id: 1)
- counter.track_action(event_action: :pushed, event_target: project, author_id: 2)
- counter.track_action(event_action: :pushed, event_target: project, author_id: 3)
- counter.track_action(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)
- counter.track_action(event_action: :created, event_target: project, author_id: 5, time: time - 3.days)
- counter.track_action(event_action: :created, event_target: wiki, author_id: 3)
- counter.track_action(event_action: :created, event_target: design, author_id: 3)
- end
-
- it 'returns the distinct count of user actions within the specified time period' do
- expect(described_class.action_monthly_active_users(time_period)).to eq(
- {
- action_monthly_active_users_design_management: 1,
- action_monthly_active_users_project_repo: 3,
- action_monthly_active_users_wiki_repo: 1
- }
- )
- end
- end
-
- context 'when the feature flag is disabled' do
- let(:feature_flag) { false }
-
- it 'returns an empty hash' do
- expect(described_class.action_monthly_active_users(time_period)).to eq({})
- end
+ counter = Gitlab::UsageDataCounters::TrackUniqueActions
+ project = Event::TARGET_TYPES[:project]
+ wiki = Event::TARGET_TYPES[:wiki]
+ design = Event::TARGET_TYPES[:design]
+
+ counter.track_event(event_action: :pushed, event_target: project, author_id: 1)
+ counter.track_event(event_action: :pushed, event_target: project, author_id: 1)
+ counter.track_event(event_action: :pushed, event_target: project, author_id: 2)
+ counter.track_event(event_action: :pushed, event_target: project, author_id: 3)
+ counter.track_event(event_action: :pushed, event_target: project, author_id: 4, time: time - 3.days)
+ counter.track_event(event_action: :created, event_target: project, author_id: 5, time: time - 3.days)
+ counter.track_event(event_action: :created, event_target: wiki, author_id: 3)
+ counter.track_event(event_action: :created, event_target: design, author_id: 3)
+ end
+
+ it 'returns the distinct count of user actions within the specified time period' do
+ expect(described_class.action_monthly_active_users(time_period)).to eq(
+ {
+ action_monthly_active_users_design_management: 1,
+ action_monthly_active_users_project_repo: 3,
+ action_monthly_active_users_wiki_repo: 1
+ }
+ )
end
end
@@ -972,11 +942,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
subject { described_class.analytics_unique_visits_data }
it 'returns the number of unique visits to pages with analytics features' do
- ::Gitlab::Analytics::UniqueVisits::TARGET_IDS.each do |target_id|
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:weekly_unique_visits_for_target).with(target_id).and_return(123)
+ ::Gitlab::Analytics::UniqueVisits.analytics_ids.each do |target_id|
+ expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: target_id).and_return(123)
end
- expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:weekly_unique_visits_for_any_target).and_return(543)
+ expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics).and_return(543)
+ expect_any_instance_of(::Gitlab::Analytics::UniqueVisits).to receive(:unique_visits_for).with(targets: :analytics, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
expect(subject).to eq({
analytics_unique_visits: {
@@ -991,12 +962,56 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'p_analytics_insights' => 123,
'p_analytics_issues' => 123,
'p_analytics_repo' => 123,
- 'u_analytics_todos' => 123,
'i_analytics_cohorts' => 123,
'i_analytics_dev_ops_score' => 123,
- 'analytics_unique_visits_for_any_target' => 543
+ 'analytics_unique_visits_for_any_target' => 543,
+ 'analytics_unique_visits_for_any_target_monthly' => 987
+ }
+ })
+ end
+ end
+
+ describe '.compliance_unique_visits_data' do
+ subject { described_class.compliance_unique_visits_data }
+
+ before do
+ described_class.clear_memoization(:unique_visit_service)
+
+ allow_next_instance_of(::Gitlab::Analytics::UniqueVisits) do |instance|
+ ::Gitlab::Analytics::UniqueVisits.compliance_ids.each do |target_id|
+ allow(instance).to receive(:unique_visits_for).with(targets: target_id).and_return(123)
+ end
+
+ allow(instance).to receive(:unique_visits_for).with(targets: :compliance).and_return(543)
+
+ allow(instance).to receive(:unique_visits_for).with(targets: :compliance, start_date: 4.weeks.ago.to_date, end_date: Date.current).and_return(987)
+ end
+ end
+
+ it 'returns the number of unique visits to pages with compliance features' do
+ expect(subject).to eq({
+ compliance_unique_visits: {
+ 'g_compliance_dashboard' => 123,
+ 'g_compliance_audit_events' => 123,
+ 'i_compliance_credential_inventory' => 123,
+ 'i_compliance_audit_events' => 123,
+ 'compliance_unique_visits_for_any_target' => 543,
+ 'compliance_unique_visits_for_any_target_monthly' => 987
}
})
end
end
+
+ describe '.service_desk_counts' do
+ subject { described_class.send(:service_desk_counts) }
+
+ let(:project) { create(:project, :service_desk_enabled) }
+
+ it 'gathers Service Desk data' do
+ create_list(:issue, 2, :confidential, author: User.support_bot, project: project)
+
+ expect(subject).to eq(service_desk_enabled_projects: 1,
+ service_desk_issues: 2)
+ end
+ end
end
diff --git a/spec/lib/gitlab/user_access_spec.rb b/spec/lib/gitlab/user_access_spec.rb
index 1a81d0127dc..d6b1e3b2d4b 100644
--- a/spec/lib/gitlab/user_access_spec.rb
+++ b/spec/lib/gitlab/user_access_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::UserAccess do
include ProjectForksHelper
- let(:access) { described_class.new(user, project: project) }
+ let(:access) { described_class.new(user, container: project) }
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
@@ -43,7 +43,7 @@ RSpec.describe Gitlab::UserAccess do
describe 'push to empty project' do
let(:empty_project) { create(:project_empty_repo) }
- let(:project_access) { described_class.new(user, project: empty_project) }
+ let(:project_access) { described_class.new(user, container: empty_project) }
it 'returns true for admins' do
user.update!(admin: true)
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 7940c9af6ff..4675cbd7fa1 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -122,7 +122,7 @@ RSpec.describe Gitlab::Utils::UsageData do
freeze_time do
result = described_class.with_finished_at(:current_time) { { a: 1 } }
- expect(result).to eq(a: 1, current_time: Time.now)
+ expect(result).to eq(a: 1, current_time: Time.current)
end
end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 7a0d40ff0d2..1eaceec1d8a 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -5,39 +5,93 @@ require 'spec_helper'
RSpec.describe Gitlab::Utils do
delegate :to_boolean, :boolean_to_yes_no, :slugify, :random_string, :which,
:ensure_array_from_string, :to_exclusive_sentence, :bytes_to_megabytes,
- :append_path, :check_path_traversal!, :ms_to_round_sec, to: :described_class
+ :append_path, :check_path_traversal!, :allowlisted?, :check_allowed_absolute_path!, :decode_path, :ms_to_round_sec, to: :described_class
describe '.check_path_traversal!' do
+ it 'detects path traversal in a string without any separators' do
+ expect { check_path_traversal!('.') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..') }.to raise_error(/Invalid path/)
+ end
+
it 'detects path traversal at the start of the string' do
expect { check_path_traversal!('../foo') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..\\foo') }.to raise_error(/Invalid path/)
end
it 'detects path traversal at the start of the string, even to just the subdirectory' do
expect { check_path_traversal!('../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('..\\') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('/../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('\\..\\') }.to raise_error(/Invalid path/)
end
it 'detects path traversal in the middle of the string' do
expect { check_path_traversal!('foo/../../bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..\\..\\bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo/..\\bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\../bar') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo/..\\..\\..\\..\\../bar') }.to raise_error(/Invalid path/)
end
it 'detects path traversal at the end of the string when slash-terminates' do
expect { check_path_traversal!('foo/../') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..\\') }.to raise_error(/Invalid path/)
end
it 'detects path traversal at the end of the string' do
expect { check_path_traversal!('foo/..') }.to raise_error(/Invalid path/)
+ expect { check_path_traversal!('foo\\..') }.to raise_error(/Invalid path/)
end
it 'does nothing for a safe string' do
expect(check_path_traversal!('./foo')).to eq('./foo')
+ expect(check_path_traversal!('.test/foo')).to eq('.test/foo')
+ expect(check_path_traversal!('..test/foo')).to eq('..test/foo')
+ expect(check_path_traversal!('dir/..foo.rb')).to eq('dir/..foo.rb')
+ expect(check_path_traversal!('dir/.foo.rb')).to eq('dir/.foo.rb')
+ end
+ end
+
+ describe '.allowlisted?' do
+ let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd'] }
+
+ it 'returns true if path is allowed' do
+ expect(allowlisted?('/foo/bar', allowed_paths)).to be(true)
+ end
+
+ it 'returns false if path is not allowed' do
+ expect(allowlisted?('/test/test', allowed_paths)).to be(false)
+ end
+ end
+
+ describe '.check_allowed_absolute_path!' do
+ let(:allowed_paths) { ['/home/foo'] }
+
+ it 'raises an exception if an absolute path is not allowed' do
+ expect { check_allowed_absolute_path!('/etc/passwd', allowed_paths) }.to raise_error(StandardError)
end
- it 'does nothing if an absolute path is allowed' do
- expect(check_path_traversal!('/etc/folder/path', allowed_absolute: true)). to eq('/etc/folder/path')
+ it 'does nothing for an allowed absolute path' do
+ expect(check_allowed_absolute_path!('/home/foo', allowed_paths)).to be_nil
end
+ end
- it 'raises exception if an absolute path is not allowed' do
- expect { check_path_traversal!('/etc/folder/path') }.to raise_error(/Invalid path/)
+ describe '.decode_path' do
+ it 'returns the unencoded path for single-encoded paths' do
+ expect(decode_path('%2Fhome%2Fbar%3Fasd%3Dqwe')).to eq('/home/bar?asd=qwe')
+ end
+
+ it 'returns path when it is unencoded' do
+ expect(decode_path('/home/bar?asd=qwe')).to eq('/home/bar?asd=qwe')
+ end
+
+ [
+ '..%252F..%252F..%252Fetc%252Fpasswd',
+ '%25252Fresult%25252Fchosennickname%25253D%252522jj%252522'
+ ].each do |multiple_encoded_path|
+ it 'raises an exception when the path is encoded multiple times' do
+ expect { decode_path(multiple_encoded_path) }.to raise_error(/path #{multiple_encoded_path} is not allowed/)
+ end
end
end
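Taken together, the decode_path examples above say: decode at most one layer of percent-encoding, pass already-decoded paths through untouched, and reject anything encoded more than once. A minimal Ruby sketch of that contract, assuming CGI.unescape as the decoder (a hypothetical helper, not GitLab's actual implementation):

require 'cgi'

# Hypothetical helper mirroring the behaviour pinned down by the specs above.
def decode_path(encoded_path)
  decoded = CGI.unescape(encoded_path)

  # If the result still decodes to something different, the input was encoded more than once.
  if decoded != encoded_path && CGI.unescape(decoded) != decoded
    raise StandardError, "path #{encoded_path} is not allowed"
  end

  decoded
end

decode_path('%2Fhome%2Fbar%3Fasd%3Dqwe')           # => "/home/bar?asd=qwe"
decode_path('/home/bar?asd=qwe')                   # => "/home/bar?asd=qwe"
decode_path('..%252F..%252F..%252Fetc%252Fpasswd') # raises StandardError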
diff --git a/spec/lib/gitlab/view/presenter/base_spec.rb b/spec/lib/gitlab/view/presenter/base_spec.rb
index 1ab6973e279..97d5e2b280d 100644
--- a/spec/lib/gitlab/view/presenter/base_spec.rb
+++ b/spec/lib/gitlab/view/presenter/base_spec.rb
@@ -57,4 +57,32 @@ RSpec.describe Gitlab::View::Presenter::Base do
expect(presenter.present).to eq(presenter)
end
end
+
+ describe '#url_builder' do
+ it 'returns the UrlBuilder instance' do
+ presenter = presenter_class.new(project)
+
+ expect(presenter.url_builder).to eq(Gitlab::UrlBuilder.instance)
+ end
+ end
+
+ describe '#web_url' do
+ it 'delegates to the UrlBuilder' do
+ presenter = presenter_class.new(project)
+
+ expect(presenter.url_builder).to receive(:build).with(project)
+
+ presenter.web_url
+ end
+ end
+
+ describe '#web_path' do
+ it 'delegates to the UrlBuilder' do
+ presenter = presenter_class.new(project)
+
+ expect(presenter.url_builder).to receive(:build).with(project, only_path: true)
+
+ presenter.web_path
+ end
+ end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index 5cb08ac1e76..da327ce7706 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -421,6 +421,24 @@ RSpec.describe Gitlab::Workhorse do
end
end
+ describe '.send_scaled_image' do
+ let(:location) { 'http://example.com/avatar.png' }
+ let(:width) { '150' }
+
+ subject { described_class.send_scaled_image(location, width) }
+
+ it 'sets the header correctly' do
+ key, command, params = decode_workhorse_header(subject)
+
+ expect(key).to eq("Gitlab-Workhorse-Send-Data")
+ expect(command).to eq("send-scaled-img")
+ expect(params).to eq({
+ 'Location' => location,
+ 'Width' => width
+ }.deep_stringify_keys)
+ end
+ end
+
describe '.send_git_snapshot' do
let(:url) { 'http://example.com' }
diff --git a/spec/lib/json_web_token/rsa_token_spec.rb b/spec/lib/json_web_token/rsa_token_spec.rb
index f19471917c2..8f0d62d8f0c 100644
--- a/spec/lib/json_web_token/rsa_token_spec.rb
+++ b/spec/lib/json_web_token/rsa_token_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe JSONWebToken::RSAToken do
-----END RSA PRIVATE KEY-----
eos
end
+
let(:rsa_token) { described_class.new(nil) }
let(:rsa_encoded) { rsa_token.encoded }
diff --git a/spec/lib/mattermost/session_spec.rb b/spec/lib/mattermost/session_spec.rb
index 5110d3cdfa3..93422b01ca7 100644
--- a/spec/lib/mattermost/session_spec.rb
+++ b/spec/lib/mattermost/session_spec.rb
@@ -61,6 +61,7 @@ RSpec.describe Mattermost::Session, type: :request do
redirect_uri: "#{mattermost_url}/signup/gitlab/complete",
state: state }
end
+
let(:location) do
"#{gitlab_url}/oauth/authorize?#{URI.encode_www_form(params)}"
end
diff --git a/spec/lib/object_storage/config_spec.rb b/spec/lib/object_storage/config_spec.rb
new file mode 100644
index 00000000000..a48b5100065
--- /dev/null
+++ b/spec/lib/object_storage/config_spec.rb
@@ -0,0 +1,179 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe ObjectStorage::Config do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:region) { 'us-east-1' }
+ let(:bucket_name) { 'test-bucket' }
+ let(:credentials) do
+ {
+ provider: 'AWS',
+ aws_access_key_id: 'AWS_ACCESS_KEY_ID',
+ aws_secret_access_key: 'AWS_SECRET_ACCESS_KEY',
+ region: region
+ }
+ end
+
+ let(:storage_options) do
+ {
+ server_side_encryption: 'AES256',
+ server_side_encryption_kms_key_id: 'arn:aws:12345'
+ }
+ end
+
+ let(:raw_config) do
+ {
+ enabled: true,
+ connection: credentials,
+ remote_directory: bucket_name,
+ storage_options: storage_options
+ }
+ end
+
+ subject { described_class.new(raw_config.as_json) }
+
+ describe '#credentials' do
+ it { expect(subject.credentials).to eq(credentials) }
+ end
+
+ describe '#storage_options' do
+ it { expect(subject.storage_options).to eq(storage_options) }
+ end
+
+ describe '#enabled?' do
+ it { expect(subject.enabled?).to eq(true) }
+ end
+
+ describe '#bucket' do
+ it { expect(subject.bucket).to eq(bucket_name) }
+ end
+
+ describe '#use_iam_profile?' do
+ it { expect(subject.use_iam_profile?).to be false }
+ end
+
+ describe '#use_path_style?' do
+ it { expect(subject.use_path_style?).to be false }
+ end
+
+ context 'with unconsolidated settings' do
+ describe 'consolidated_settings? returns false' do
+ it { expect(subject.consolidated_settings?).to be false }
+ end
+ end
+
+ context 'with consolidated settings' do
+ before do
+ raw_config[:consolidated_settings] = true
+ end
+
+ describe 'consolidated_settings? returns true' do
+ it { expect(subject.consolidated_settings?).to be true }
+ end
+ end
+
+ context 'with IAM profile configured' do
+ where(:value, :expected) do
+ true | true
+ "true" | true
+ "yes" | true
+ false | false
+ "false" | false
+ "no" | false
+ nil | false
+ end
+
+ with_them do
+ before do
+ credentials[:use_iam_profile] = value
+ end
+
+ it 'coerces the value to a boolean' do
+ expect(subject.use_iam_profile?).to be expected
+ end
+ end
+ end
+
+ context 'with path style configured' do
+ where(:value, :expected) do
+ true | true
+ "true" | true
+ "yes" | true
+ false | false
+ "false" | false
+ "no" | false
+ nil | false
+ end
+
+ with_them do
+ before do
+ credentials[:path_style] = value
+ end
+
+ it 'coerces the value to a boolean' do
+ expect(subject.use_path_style?).to be expected
+ end
+ end
+ end
+
+ context 'with hostname style access' do
+ it '#use_path_style? returns false' do
+ expect(subject.use_path_style?).to be false
+ end
+ end
+
+ context 'with AWS credentials' do
+ it { expect(subject.provider).to eq('AWS') }
+ it { expect(subject.aws?).to be true }
+ it { expect(subject.google?).to be false }
+ end
+
+ context 'with Google credentials' do
+ let(:credentials) do
+ {
+ provider: 'Google',
+ google_client_email: 'foo@gcp-project.example.com',
+ google_json_key_location: '/path/to/gcp.json'
+ }
+ end
+
+ it { expect(subject.provider).to eq('Google') }
+ it { expect(subject.aws?).to be false }
+ it { expect(subject.google?).to be true }
+ it { expect(subject.fog_attributes).to eq({}) }
+ end
+
+ context 'with SSE-KMS enabled' do
+ it { expect(subject.server_side_encryption).to eq('AES256') }
+ it { expect(subject.server_side_encryption_kms_key_id).to eq('arn:aws:12345') }
+ it { expect(subject.fog_attributes.keys).to match_array(%w(x-amz-server-side-encryption x-amz-server-side-encryption-aws-kms-key-id)) }
+ end
+
+ context 'with only server side encryption enabled' do
+ let(:storage_options) { { server_side_encryption: 'AES256' } }
+
+ it { expect(subject.server_side_encryption).to eq('AES256') }
+ it { expect(subject.server_side_encryption_kms_key_id).to be_nil }
+ it { expect(subject.fog_attributes).to eq({ 'x-amz-server-side-encryption' => 'AES256' }) }
+ end
+
+ context 'without encryption enabled' do
+ let(:storage_options) { {} }
+
+ it { expect(subject.server_side_encryption).to be_nil }
+ it { expect(subject.server_side_encryption_kms_key_id).to be_nil }
+ it { expect(subject.fog_attributes).to eq({}) }
+ end
+
+ context 'with object storage disabled' do
+ before do
+ raw_config['enabled'] = false
+ end
+
+ it { expect(subject.enabled?).to be false }
+ it { expect(subject.fog_attributes).to eq({}) }
+ end
+end
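For orientation, the SSE examples in this new spec (AES256 alone, AES256 plus a KMS key, disabled or non-AWS storage yielding an empty hash) imply roughly the following fog_attributes derivation. This is a self-contained sketch inferred from the expectations above, using a hypothetical stand-in class rather than ObjectStorage::Config's shipped code:

# Hypothetical stand-in for the configuration object exercised above; the attribute
# names mirror the spec, while the fog_attributes derivation is an assumption.
HypotheticalConfig = Struct.new(:enabled, :provider, :server_side_encryption,
                                :server_side_encryption_kms_key_id, keyword_init: true) do
  def enabled?
    !!enabled
  end

  def aws?
    provider == 'AWS'
  end

  def fog_attributes
    return {} unless enabled? && aws? && server_side_encryption

    attrs = { 'x-amz-server-side-encryption' => server_side_encryption }
    attrs['x-amz-server-side-encryption-aws-kms-key-id'] = server_side_encryption_kms_key_id if server_side_encryption_kms_key_id
    attrs
  end
end

HypotheticalConfig.new(enabled: true, provider: 'AWS', server_side_encryption: 'AES256').fog_attributes
# => { "x-amz-server-side-encryption" => "AES256" }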
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index 1c1455e2456..b11926aeb49 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -18,13 +18,25 @@ RSpec.describe ObjectStorage::DirectUpload do
}
end
+ let(:storage_options) { {} }
+ let(:raw_config) do
+ {
+ enabled: true,
+ connection: credentials,
+ remote_directory: bucket_name,
+ storage_options: storage_options,
+ consolidated_settings: consolidated_settings
+ }
+ end
+
+ let(:config) { ObjectStorage::Config.new(raw_config) }
let(:storage_url) { 'https://uploads.s3.amazonaws.com/' }
let(:bucket_name) { 'uploads' }
let(:object_name) { 'tmp/uploads/my-file' }
let(:maximum_size) { 1.gigabyte }
- let(:direct_upload) { described_class.new(credentials, bucket_name, object_name, has_length: has_length, maximum_size: maximum_size, consolidated_settings: consolidated_settings) }
+ let(:direct_upload) { described_class.new(config, object_name, has_length: has_length, maximum_size: maximum_size) }
before do
Fog.unmock!
@@ -62,7 +74,7 @@ RSpec.describe ObjectStorage::DirectUpload do
end
describe '#get_url' do
- subject { described_class.new(credentials, bucket_name, object_name, has_length: true) }
+ subject { described_class.new(config, object_name, has_length: true) }
context 'when AWS is used' do
it 'calls the proper method' do
@@ -93,7 +105,7 @@ RSpec.describe ObjectStorage::DirectUpload do
end
end
- describe '#to_hash' do
+ describe '#to_hash', :aggregate_failures do
subject { direct_upload.to_hash }
shared_examples 'a valid S3 upload' do
@@ -111,6 +123,7 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(s3_config[:Region]).to eq(region)
expect(s3_config[:PathStyle]).to eq(path_style)
expect(s3_config[:UseIamProfile]).to eq(use_iam_profile)
+ expect(s3_config.keys).not_to include(:ServerSideEncryption, :SSEKMSKeyID)
end
context 'when feature flag is disabled' do
@@ -150,6 +163,33 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(subject[:UseWorkhorseClient]).to be true
end
end
+
+ context 'when only server side encryption is used' do
+ let(:storage_options) { { server_side_encryption: 'AES256' } }
+
+ it 'sends server side encryption settings' do
+ s3_config = subject[:ObjectStorage][:S3Config]
+
+ expect(s3_config[:ServerSideEncryption]).to eq('AES256')
+ expect(s3_config.keys).not_to include(:SSEKMSKeyID)
+ end
+ end
+
+ context 'when SSE-KMS is used' do
+ let(:storage_options) do
+ {
+ server_side_encryption: 'AES256',
+ server_side_encryption_kms_key_id: 'arn:aws:12345'
+ }
+ end
+
+ it 'sends server side encryption settings' do
+ s3_config = subject[:ObjectStorage][:S3Config]
+
+ expect(s3_config[:ServerSideEncryption]).to eq('AES256')
+ expect(s3_config[:SSEKMSKeyID]).to eq('arn:aws:12345')
+ end
+ end
end
shared_examples 'a valid Google upload' do
@@ -160,6 +200,21 @@ RSpec.describe ObjectStorage::DirectUpload do
end
end
+ shared_examples 'a valid AzureRM upload' do
+ before do
+ require 'fog/azurerm'
+ end
+
+ it_behaves_like 'a valid upload'
+
+ it 'enables the Workhorse client' do
+ expect(subject[:UseWorkhorseClient]).to be true
+ expect(subject[:RemoteTempObjectID]).to eq(object_name)
+ expect(subject[:ObjectStorage][:Provider]).to eq('AzureRM')
+ expect(subject[:ObjectStorage][:GoCloudConfig]).to eq({ URL: "azblob://#{bucket_name}" })
+ end
+ end
+
shared_examples 'a valid upload' do
it "returns valid structure" do
expect(subject).to have_key(:Timeout)
@@ -330,5 +385,31 @@ RSpec.describe ObjectStorage::DirectUpload do
it_behaves_like 'a valid upload without multipart data'
end
end
+
+ context 'when AzureRM is used' do
+ let(:credentials) do
+ {
+ provider: 'AzureRM',
+ azure_storage_account_name: 'azuretest',
+ azure_storage_access_key: 'ABCD1234'
+ }
+ end
+
+ let(:storage_url) { 'https://azuretest.blob.core.windows.net' }
+
+ context 'when length is known' do
+ let(:has_length) { true }
+
+ it_behaves_like 'a valid AzureRM upload'
+ it_behaves_like 'a valid upload without multipart data'
+ end
+
+ context 'when length is unknown' do
+ let(:has_length) { false }
+
+ it_behaves_like 'a valid AzureRM upload'
+ it_behaves_like 'a valid upload without multipart data'
+ end
+ end
end
end
diff --git a/spec/lib/omni_auth/strategies/jwt_spec.rb b/spec/lib/omni_auth/strategies/jwt_spec.rb
index 0f4528d4fbe..b29e48b0de5 100644
--- a/spec/lib/omni_auth/strategies/jwt_spec.rb
+++ b/spec/lib/omni_auth/strategies/jwt_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe OmniAuth::Strategies::Jwt do
iat: timestamp
}
end
+
let(:algorithm) { 'HS256' }
let(:secret) { jwt_config.strategy.secret }
let(:private_key) { secret }
@@ -61,6 +62,7 @@ RSpec.describe OmniAuth::Strategies::Jwt do
private_key_class.new(jwt_config.strategy.secret)
end
end
+
let(:private_key) { private_key_class ? private_key_class.new(secret) : secret }
it 'decodes the user information' do
diff --git a/spec/lib/product_analytics/event_params_spec.rb b/spec/lib/product_analytics/event_params_spec.rb
index d6c098599d6..e560fd10dfd 100644
--- a/spec/lib/product_analytics/event_params_spec.rb
+++ b/spec/lib/product_analytics/event_params_spec.rb
@@ -23,7 +23,12 @@ RSpec.describe ProductAnalytics::EventParams do
br_lang: 'en-US',
br_cookies: true,
os_timezone: 'America/Los_Angeles',
- doc_charset: 'UTF-8'
+ doc_charset: 'UTF-8',
+ se_category: 'category',
+ se_action: 'action',
+ se_label: 'label',
+ se_property: 'property',
+ se_value: 12.34
}
expect(subject).to include(expected_params)
diff --git a/spec/lib/product_analytics/tracker_spec.rb b/spec/lib/product_analytics/tracker_spec.rb
new file mode 100644
index 00000000000..d5e85e6e1cd
--- /dev/null
+++ b/spec/lib/product_analytics/tracker_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe ProductAnalytics::Tracker do
+ it { expect(described_class::URL).to eq('http://localhost/-/sp.js') }
+ it { expect(described_class::COLLECTOR_URL).to eq('localhost/-/collector') }
+end
diff --git a/spec/lib/rspec_flaky/example_spec.rb b/spec/lib/rspec_flaky/example_spec.rb
index aaf5ddc6f74..4b45a15c463 100644
--- a/spec/lib/rspec_flaky/example_spec.rb
+++ b/spec/lib/rspec_flaky/example_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe RspecFlaky::Example do
attempts: 1
}
end
+
let(:rspec_example) { double(example_attrs) }
describe '#initialize' do
diff --git a/spec/lib/rspec_flaky/flaky_example_spec.rb b/spec/lib/rspec_flaky/flaky_example_spec.rb
index 8ac323475d6..b1647d5830a 100644
--- a/spec/lib/rspec_flaky/flaky_example_spec.rb
+++ b/spec/lib/rspec_flaky/flaky_example_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do
flaky_reports: 1
}
end
+
let(:example_attrs) do
{
uid: 'abc123',
@@ -28,6 +29,7 @@ RSpec.describe RspecFlaky::FlakyExample, :aggregate_failures do
attempts: flaky_example_attrs[:last_attempts_count]
}
end
+
let(:example) { double(example_attrs) }
before do
diff --git a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
index 5718d8211af..b2fd1d3733a 100644
--- a/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
+++ b/spec/lib/rspec_flaky/flaky_examples_collection_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe RspecFlaky::FlakyExamplesCollection, :aggregate_failures do
b: { example_id: 'spec/foo/baz_spec.rb:3' }
}
end
+
let(:collection_report) do
{
a: {
diff --git a/spec/lib/rspec_flaky/listener_spec.rb b/spec/lib/rspec_flaky/listener_spec.rb
index 2438ae171d3..10ed724d4de 100644
--- a/spec/lib/rspec_flaky/listener_spec.rb
+++ b/spec/lib/rspec_flaky/listener_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe RspecFlaky::Listener, :aggregate_failures do
}
}
end
+
let(:already_flaky_example_attrs) do
{
id: 'spec/foo/bar_spec.rb:2',
@@ -30,6 +31,7 @@ RSpec.describe RspecFlaky::Listener, :aggregate_failures do
execution_result: double(status: 'passed', exception: nil)
}
end
+
let(:already_flaky_example) { RspecFlaky::FlakyExample.new(suite_flaky_example_report[already_flaky_example_uid]) }
let(:new_example_attrs) do
{
diff --git a/spec/lib/rspec_flaky/report_spec.rb b/spec/lib/rspec_flaky/report_spec.rb
index e735329a8a3..5cacfdb82fb 100644
--- a/spec/lib/rspec_flaky/report_spec.rb
+++ b/spec/lib/rspec_flaky/report_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe RspecFlaky::Report, :aggregate_failures do
b: { example_id: 'spec/foo/baz_spec.rb:3', first_flaky_at: (Time.now - thirty_one_days).to_s, last_flaky_at: (Time.now - thirty_one_days).to_s }
}
end
+
let(:suite_flaky_example_report) do
{
'6e869794f4cfd2badd93eb68719371d1': {
@@ -25,6 +26,7 @@ RSpec.describe RspecFlaky::Report, :aggregate_failures do
}
}
end
+
let(:flaky_examples) { RspecFlaky::FlakyExamplesCollection.new(collection_hash) }
let(:report) { described_class.new(flaky_examples) }
diff --git a/spec/lib/sentry/client/event_spec.rb b/spec/lib/sentry/client/event_spec.rb
index af1e28d09bb..07ed331c44c 100644
--- a/spec/lib/sentry/client/event_spec.rb
+++ b/spec/lib/sentry/client/event_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Sentry::Client do
headers: { "Authorization" => "Bearer test-token" }
}
end
+
let(:client) { described_class.new(sentry_url, token) }
describe '#issue_latest_event' do
@@ -21,6 +22,7 @@ RSpec.describe Sentry::Client do
Gitlab::Json.parse(fixture_file('sentry/issue_latest_event_sample_response.json'))
)
end
+
let(:issue_id) { '1234' }
let(:sentry_api_response) { sample_response }
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0' }
diff --git a/spec/lib/sentry/client/issue_spec.rb b/spec/lib/sentry/client/issue_spec.rb
index a6279aeadd2..dedef905c95 100644
--- a/spec/lib/sentry/client/issue_spec.rb
+++ b/spec/lib/sentry/client/issue_spec.rb
@@ -65,6 +65,7 @@ RSpec.describe Sentry::Client::Issue do
link: '<https://sentrytest.gitlab.com>; rel="previous"; results="true"; cursor="1573556671000:0:1", <https://sentrytest.gitlab.com>; rel="next"; results="true"; cursor="1572959139000:0:0"'
}
end
+
let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response, headers: headers) }
it 'parses the pagination' do
@@ -114,6 +115,7 @@ RSpec.describe Sentry::Client::Issue do
'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project/' \
'issues/?limit=20&query=is:unresolved&sort=freq'
end
+
let!(:sentry_api_request) { stub_sentry_request(sentry_request_url, body: sentry_api_response) }
subject { client.list_issues(issue_status: issue_status, limit: limit, sort: 'frequency') }
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index ee91df360b6..fbbdef5feee 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -157,6 +157,56 @@ RSpec.describe Emails::Profile do
end
end
+ describe 'user personal access token has expired' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when valid' do
+ subject { Notify.access_token_expired_email(user) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'is sent to the user' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject /Your personal access token has expired/
+ end
+
+ it 'mentions the access token has expired' do
+ is_expected.to have_body_text /One or more of your personal access tokens has expired/
+ end
+
+ it 'includes a link to personal access tokens page' do
+ is_expected.to have_body_text /#{profile_personal_access_tokens_path}/
+ end
+
+ it 'includes the email reason' do
+ is_expected.to have_body_text /You're receiving this email because of your account on localhost/
+ end
+ end
+
+ context 'when invalid' do
+ context 'when user does not exist' do
+ it do
+ expect { Notify.access_token_expired_email(nil) }.not_to change { ActionMailer::Base.deliveries.count }
+ end
+ end
+
+ context 'when user is not active' do
+ before do
+ user.block!
+ end
+
+ it do
+ expect { Notify.access_token_expired_email(user) }.not_to change { ActionMailer::Base.deliveries.count }
+ end
+ end
+ end
+ end
+
describe 'user unknown sign in email' do
let_it_be(:user) { create(:user) }
let_it_be(:ip) { '169.0.0.1' }
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 9dde80f58d5..7bd1fae8f91 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -1373,6 +1373,7 @@ RSpec.describe Notify do
group.request_access(user)
group.requesters.find_by(user_id: user.id)
end
+
let(:recipient) { user }
subject { described_class.member_access_denied_email('group', group.id, user.id) }
diff --git a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
index 750d3d1fa30..6c957ee1428 100644
--- a/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20190924152703_migrate_issue_trackers_data_spec.rb
@@ -13,27 +13,35 @@ RSpec.describe MigrateIssueTrackersData do
'url' => 'http://example.com'
}
end
+
let!(:jira_service) do
services.create(type: 'JiraService', properties: properties, category: 'issue_tracker')
end
+
let!(:jira_service_nil) do
services.create(type: 'JiraService', properties: nil, category: 'issue_tracker')
end
+
let!(:bugzilla_service) do
services.create(type: 'BugzillaService', properties: properties, category: 'issue_tracker')
end
+
let!(:youtrack_service) do
services.create(type: 'YoutrackService', properties: properties, category: 'issue_tracker')
end
+
let!(:youtrack_service_empty) do
services.create(type: 'YoutrackService', properties: '', category: 'issue_tracker')
end
+
let!(:gitlab_service) do
services.create(type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
end
+
let!(:gitlab_service_empty) do
services.create(type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
end
+
let!(:other_service) do
services.create(type: 'OtherService', properties: properties, category: 'other_category')
end
diff --git a/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb b/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb
index cae8e858af9..e69a30752db 100644
--- a/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb
+++ b/spec/migrations/20200123155929_remove_invalid_jira_data_spec.rb
@@ -28,32 +28,39 @@ RSpec.describe RemoveInvalidJiraData do
data[:encrypted_api_url_iv] = nil
jira_tracker_data.create(data)
end
+
let!(:missing_api_url) do
data[:encrypted_api_url] = ''
data[:encrypted_api_url_iv] = nil
jira_tracker_data.create(data)
end
+
let!(:invalid_url) do
data[:encrypted_url_iv] = nil
jira_tracker_data.create(data)
end
+
let!(:missing_url) do
data[:encrypted_url] = ''
jira_tracker_data.create(data)
end
+
let!(:invalid_username) do
data[:encrypted_username_iv] = nil
jira_tracker_data.create(data)
end
+
let!(:missing_username) do
data[:encrypted_username] = nil
data[:encrypted_username_iv] = nil
jira_tracker_data.create(data)
end
+
let!(:invalid_password) do
data[:encrypted_password_iv] = nil
jira_tracker_data.create(data)
end
+
let!(:missing_password) do
data[:encrypted_password] = nil
data[:encrypted_username_iv] = nil
diff --git a/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb b/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb
index a86b5d83a0b..e7917cf5d72 100644
--- a/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb
+++ b/spec/migrations/20200127090233_remove_invalid_issue_tracker_data_spec.rb
@@ -26,23 +26,28 @@ RSpec.describe RemoveInvalidIssueTrackerData do
data[:encrypted_issues_url_iv] = nil
issue_tracker_data.create(data)
end
+
let!(:missing_issues_url) do
data[:encrypted_issues_url] = ''
data[:encrypted_issues_url_iv] = nil
issue_tracker_data.create(data)
end
+
let!(:invalid_new_isue_url) do
data[:encrypted_new_issue_url_iv] = nil
issue_tracker_data.create(data)
end
+
let!(:missing_new_issue_url) do
data[:encrypted_new_issue_url] = ''
issue_tracker_data.create(data)
end
+
let!(:invalid_project_url) do
data[:encrypted_project_url_iv] = nil
issue_tracker_data.create(data)
end
+
let!(:missing_project_url) do
data[:encrypted_project_url] = nil
data[:encrypted_project_url_iv] = nil
diff --git a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
index 146302ba650..d5208976928 100644
--- a/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
+++ b/spec/migrations/20200130145430_reschedule_migrate_issue_trackers_data_spec.rb
@@ -13,27 +13,35 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
'url' => 'http://example.com'
}
end
+
let!(:jira_service) do
services.create(id: 10, type: 'JiraService', properties: properties, category: 'issue_tracker')
end
+
let!(:jira_service_nil) do
services.create(id: 11, type: 'JiraService', properties: nil, category: 'issue_tracker')
end
+
let!(:bugzilla_service) do
services.create(id: 12, type: 'BugzillaService', properties: properties, category: 'issue_tracker')
end
+
let!(:youtrack_service) do
services.create(id: 13, type: 'YoutrackService', properties: properties, category: 'issue_tracker')
end
+
let!(:youtrack_service_empty) do
services.create(id: 14, type: 'YoutrackService', properties: '', category: 'issue_tracker')
end
+
let!(:gitlab_service) do
services.create(id: 15, type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
end
+
let!(:gitlab_service_empty) do
services.create(id: 16, type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
end
+
let!(:other_service) do
services.create(id: 17, type: 'OtherService', properties: properties, category: 'other_category')
end
@@ -67,6 +75,7 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
encrypted_issues_url_iv: 'somevalue'
)
end
+
let!(:invalid_issue_tracker_data) do
issue_tracker_data.create(
service_id: bugzilla_service.id,
@@ -74,6 +83,7 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
encrypted_issues_url_iv: nil
)
end
+
let!(:valid_jira_tracker_data) do
jira_tracker_data.create(
service_id: bugzilla_service.id,
@@ -81,6 +91,7 @@ RSpec.describe RescheduleMigrateIssueTrackersData do
encrypted_url_iv: 'somevalue'
)
end
+
let!(:invalid_jira_tracker_data) do
jira_tracker_data.create(
service_id: bugzilla_service.id,
diff --git a/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb b/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb
new file mode 100644
index 00000000000..f9a56bf649d
--- /dev/null
+++ b/spec/migrations/20200728080250_replace_unique_index_on_cycle_analytics_stages_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20200728080250_replace_unique_index_on_cycle_analytics_stages.rb')
+
+RSpec.describe ReplaceUniqueIndexOnCycleAnalyticsStages, :migration, schema: 20200728080250 do
+ let(:namespaces) { table(:namespaces) }
+ let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
+ let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
+
+ let(:group) { namespaces.create!(type: 'Group', name: 'test', path: 'test') }
+
+ let(:value_stream_1) { group_value_streams.create!(group_id: group.id, name: 'vs1') }
+ let(:value_stream_2) { group_value_streams.create!(group_id: group.id, name: 'vs2') }
+
+ let(:duplicated_stage_1) { group_stages.create!(group_id: group.id, group_value_stream_id: value_stream_1.id, name: 'stage', start_event_identifier: 1, end_event_identifier: 1) }
+ let(:duplicated_stage_2) { group_stages.create!(group_id: group.id, group_value_stream_id: value_stream_2.id, name: 'stage', start_event_identifier: 1, end_event_identifier: 1) }
+
+ let(:stage_record) { group_stages.create!(group_id: group.id, group_value_stream_id: value_stream_2.id, name: 'other stage', start_event_identifier: 1, end_event_identifier: 1) }
+
+ describe '#down' do
+ subject { described_class.new.down }
+
+ before do
+ described_class.new.up
+
+ duplicated_stage_1
+ duplicated_stage_2
+ stage_record
+ end
+
+ it 'removes duplicated stage records' do
+ subject
+
+ stage = group_stages.find_by_id(duplicated_stage_2.id)
+ expect(stage).to be_nil
+ end
+
+ it 'does not change the first duplicated stage record' do
+ expect { subject }.not_to change { duplicated_stage_1.reload.attributes }
+ end
+
+ it 'does not change the non-duplicated stage record' do
+ expect { subject }.not_to change { stage_record.reload.attributes }
+ end
+ end
+end
diff --git a/spec/migrations/migrate_all_merge_request_user_mentions_to_db_spec.rb b/spec/migrations/migrate_all_merge_request_user_mentions_to_db_spec.rb
new file mode 100644
index 00000000000..f7789469efb
--- /dev/null
+++ b/spec/migrations/migrate_all_merge_request_user_mentions_to_db_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200601120434_migrate_all_merge_request_user_mentions_to_db')
+
+RSpec.describe MigrateAllMergeRequestUserMentionsToDb, :migration do
+ let(:users) { table(:users) }
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:merge_requests) { table(:merge_requests) }
+ let(:merge_request_user_mentions) { table(:merge_request_user_mentions) }
+
+ let(:user) { users.create!(name: 'root', email: 'root@example.com', username: 'root', projects_limit: 0) }
+ let(:group) { namespaces.create!(name: 'group1', path: 'group1', owner_id: user.id, type: 'Group') }
+ let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) }
+
+ let(:opened_state) { 1 }
+ let(:closed_state) { 2 }
+ let(:merged_state) { 3 }
+
+ # migratable resources
+ let(:common_args) { { source_branch: 'master', source_project_id: project.id, target_project_id: project.id, author_id: user.id, description: 'mr description with @root mention' } }
+ let!(:resource1) { merge_requests.create!(common_args.merge(title: "title 1", state_id: opened_state, target_branch: 'feature1')) }
+ let!(:resource2) { merge_requests.create!(common_args.merge(title: "title 2", state_id: closed_state, target_branch: 'feature2')) }
+ let!(:resource3) { merge_requests.create!(common_args.merge(title: "title 3", state_id: merged_state, target_branch: 'feature3')) }
+
+ # non-migratable resources
+ # this merge request is already migrated, as it has a record in the merge_request_user_mentions table
+ let!(:resource4) { merge_requests.create!(common_args.merge(title: "title 3", state_id: opened_state, target_branch: 'feature4')) }
+ let!(:user_mention) { merge_request_user_mentions.create!(merge_request_id: resource4.id, mentioned_users_ids: [1]) }
+
+ let!(:resource5) { merge_requests.create!(common_args.merge(title: "title 3", description: 'description with no mention', state_id: opened_state, target_branch: 'feature5')) }
+
+ it_behaves_like 'schedules resource mentions migration', MergeRequest, false
+end
diff --git a/spec/migrations/migrate_incident_issues_to_incident_type_spec.rb b/spec/migrations/migrate_incident_issues_to_incident_type_spec.rb
new file mode 100644
index 00000000000..dc38695c7fe
--- /dev/null
+++ b/spec/migrations/migrate_incident_issues_to_incident_type_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200723040950_migrate_incident_issues_to_incident_type.rb')
+
+RSpec.describe MigrateIncidentIssuesToIncidentType do
+ let(:migration) { described_class.new }
+
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:labels) { table(:labels) }
+ let(:issues) { table(:issues) }
+ let(:label_links) { table(:label_links) }
+ let(:label_props) { IncidentManagement::CreateIncidentLabelService::LABEL_PROPERTIES }
+
+ let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let(:label) { labels.create!(project_id: project.id, **label_props) }
+ let!(:incident_issue) { issues.create!(project_id: project.id) }
+ let!(:other_issue) { issues.create!(project_id: project.id) }
+
+ # Issue issue_type enum
+ let(:issue_type) { 0 }
+ let(:incident_type) { 1 }
+
+ before do
+ label_links.create!(target_id: incident_issue.id, label_id: label.id, target_type: 'Issue')
+ end
+
+ describe '#up' do
+ it 'updates the incident issue type' do
+ expect { migrate! }
+ .to change { incident_issue.reload.issue_type }
+ .from(issue_type)
+ .to(incident_type)
+
+ expect(other_issue.reload.issue_type).to eql(issue_type)
+ end
+ end
+
+ describe '#down' do
+ let!(:incident_issue) { issues.create!(project_id: project.id, issue_type: issue_type) }
+
+ it 'reverts the incident issue type' do
+ migration.up
+
+ expect { migration.down }
+ .to change { incident_issue.reload.issue_type }
+ .from(incident_type)
+ .to(issue_type)
+
+ expect(other_issue.reload.issue_type).to eql(issue_type)
+ end
+ end
+end
diff --git a/spec/migrations/migrate_k8s_service_integration_spec.rb b/spec/migrations/migrate_k8s_service_integration_spec.rb
index a1b2f2ae841..66e30b29f61 100644
--- a/spec/migrations/migrate_k8s_service_integration_spec.rb
+++ b/spec/migrations/migrate_k8s_service_integration_spec.rb
@@ -125,6 +125,7 @@ RSpec.describe MigrateK8sServiceIntegration do
platform_type: :kubernetes
)
end
+
let(:new_cluster) { MigrateK8sServiceIntegration::Cluster.instance_type.last! }
let(:platform) { new_cluster.platform_kubernetes }
diff --git a/spec/migrations/remove_duplicate_labels_from_project_spec.rb b/spec/migrations/remove_duplicate_labels_from_project_spec.rb
index 5e287eaa4ed..5eb8ba96aae 100644
--- a/spec/migrations/remove_duplicate_labels_from_project_spec.rb
+++ b/spec/migrations/remove_duplicate_labels_from_project_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe RemoveDuplicateLabelsFromProject do
projects_table.create!(id: 1, name: 'project', path: 'project',
visibility_level: 0, namespace_id: group_one.id)
end
+
let(:label_title) { 'bug' }
let(:label_color) { 'red' }
let(:label_description) { 'nice label' }
diff --git a/spec/migrations/remove_orphaned_invited_members_spec.rb b/spec/migrations/remove_orphaned_invited_members_spec.rb
index 0474b5362be..10da63518c2 100644
--- a/spec/migrations/remove_orphaned_invited_members_spec.rb
+++ b/spec/migrations/remove_orphaned_invited_members_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe RemoveOrphanedInvitedMembers do
invite_token: SecureRandom.hex, invite_accepted_at: Time.now,
access_level: 20)
end
+
let!(:invited_member2) do
create_member(user_id: nil, source_type: 'Group', source_id: group.id,
invite_token: SecureRandom.hex, invite_accepted_at: Time.now,
@@ -32,6 +33,7 @@ RSpec.describe RemoveOrphanedInvitedMembers do
create_member(user_id: nil, source_type: 'Project', source_id: project.id,
invite_accepted_at: Time.now, access_level: 30)
end
+
let!(:orphaned_member2) do
create_member(user_id: nil, source_type: 'Group', source_id: group.id,
invite_accepted_at: Time.now, access_level: 20)
diff --git a/spec/migrations/schedule_populate_personal_snippet_statistics_spec.rb b/spec/migrations/schedule_populate_personal_snippet_statistics_spec.rb
new file mode 100644
index 00000000000..ce618449884
--- /dev/null
+++ b/spec/migrations/schedule_populate_personal_snippet_statistics_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200714075739_schedule_populate_personal_snippet_statistics.rb')
+
+RSpec.describe SchedulePopulatePersonalSnippetStatistics do
+ let(:users) { table(:users) }
+ let(:snippets) { table(:snippets) }
+ let(:projects) { table(:projects) }
+ let(:user1) { users.create!(id: 1, email: 'user1@example.com', projects_limit: 10, username: 'test1', name: 'Test1', state: 'active') }
+ let(:user2) { users.create!(id: 2, email: 'user2@example.com', projects_limit: 10, username: 'test2', name: 'Test2', state: 'active') }
+ let(:user3) { users.create!(id: 3, email: 'user3@example.com', projects_limit: 10, username: 'test3', name: 'Test3', state: 'active') }
+
+ def create_snippet(id, user_id, type = 'PersonalSnippet')
+ params = {
+ id: id,
+ type: type,
+ author_id: user_id,
+ file_name: 'foo',
+ content: 'bar'
+ }
+
+ snippets.create!(params)
+ end
+
+ it 'correctly schedules background migrations' do
+ # Creating the snippets in a mixed order across authors
+ create_snippet(1, user1.id)
+ create_snippet(2, user2.id)
+ create_snippet(3, user1.id)
+ create_snippet(4, user3.id)
+ create_snippet(5, user3.id)
+ create_snippet(6, user1.id)
+ # Creating a project snippet to ensure we don't pick it
+ create_snippet(7, user1.id, 'ProjectSnippet')
+
+ stub_const("#{described_class}::BATCH_SIZE", 4)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ aggregate_failures do
+ expect(described_class::MIGRATION)
+ .to be_scheduled_migration([1, 3, 6, 2])
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, [4, 5])
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb
index 9206f14fd37..3418d7d39ad 100644
--- a/spec/models/ability_spec.rb
+++ b/spec/models/ability_spec.rb
@@ -194,6 +194,7 @@ RSpec.describe Ability do
let(:cross_project_merge_request) do
create(:merge_request, source_project: create(:project, :public))
end
+
let(:other_merge_request) { create(:merge_request) }
let(:all_merge_requests) do
[merge_request, cross_project_merge_request, other_merge_request]
diff --git a/spec/models/alert_management/alert_spec.rb b/spec/models/alert_management/alert_spec.rb
index becc5475c15..f937a879400 100644
--- a/spec/models/alert_management/alert_spec.rb
+++ b/spec/models/alert_management/alert_spec.rb
@@ -3,6 +3,13 @@
require 'spec_helper'
RSpec.describe AlertManagement::Alert do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
+ let_it_be(:triggered_alert, reload: true) { create(:alert_management_alert, :triggered, project: project) }
+ let_it_be(:acknowledged_alert, reload: true) { create(:alert_management_alert, :acknowledged, project: project) }
+ let_it_be(:resolved_alert, reload: true) { create(:alert_management_alert, :resolved, project: project2) }
+ let_it_be(:ignored_alert, reload: true) { create(:alert_management_alert, :ignored, project: project2) }
+
describe 'associations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:issue).optional }
@@ -27,63 +34,70 @@ RSpec.describe AlertManagement::Alert do
it { is_expected.to validate_length_of(:monitoring_tool).is_at_most(100) }
context 'when status is triggered' do
- context 'when ended_at is blank' do
- subject { build(:alert_management_alert) }
+ subject { triggered_alert }
+ context 'when ended_at is blank' do
it { is_expected.to be_valid }
end
context 'when ended_at is present' do
- subject { build(:alert_management_alert, ended_at: Time.current) }
+ before do
+ triggered_alert.ended_at = Time.current
+ end
it { is_expected.to be_invalid }
end
end
context 'when status is acknowledged' do
- context 'when ended_at is blank' do
- subject { build(:alert_management_alert, :acknowledged) }
+ subject { acknowledged_alert }
+ context 'when ended_at is blank' do
it { is_expected.to be_valid }
end
context 'when ended_at is present' do
- subject { build(:alert_management_alert, :acknowledged, ended_at: Time.current) }
+ before do
+ acknowledged_alert.ended_at = Time.current
+ end
it { is_expected.to be_invalid }
end
end
context 'when status is resolved' do
+ subject { resolved_alert }
+
context 'when ended_at is blank' do
- subject { build(:alert_management_alert, :resolved, ended_at: nil) }
+ before do
+ resolved_alert.ended_at = nil
+ end
it { is_expected.to be_invalid }
end
context 'when ended_at is present' do
- subject { build(:alert_management_alert, :resolved, ended_at: Time.current) }
-
it { is_expected.to be_valid }
end
end
context 'when status is ignored' do
- context 'when ended_at is blank' do
- subject { build(:alert_management_alert, :ignored) }
+ subject { ignored_alert }
+ context 'when ended_at is blank' do
it { is_expected.to be_valid }
end
context 'when ended_at is present' do
- subject { build(:alert_management_alert, :ignored, ended_at: Time.current) }
+ before do
+ ignored_alert.ended_at = Time.current
+ end
it { is_expected.to be_invalid }
end
end
describe 'fingerprint' do
- let_it_be(:project) { create(:project) }
let_it_be(:fingerprint) { 'fingerprint' }
let(:new_alert) { build(:alert_management_alert, fingerprint: fingerprint, project: project) }
@@ -93,6 +107,8 @@ RSpec.describe AlertManagement::Alert do
context 'same project, various states' do
using RSpec::Parameterized::TableSyntax
+ let_it_be(:existing_alert) { create(:alert_management_alert, fingerprint: fingerprint, project: project) }
+
# We are only validating uniqueness for non-resolved alerts
where(:existing_status, :new_status, :valid) do
:resolved | :triggered | true
@@ -114,9 +130,12 @@ RSpec.describe AlertManagement::Alert do
end
with_them do
- let!(:existing_alert) { create(:alert_management_alert, existing_status, fingerprint: fingerprint, project: project) }
let(:new_alert) { build(:alert_management_alert, new_status, fingerprint: fingerprint, project: project) }
+ before do
+ existing_alert.public_send(described_class::STATUS_EVENTS[existing_status])
+ end
+
if params[:valid]
it { is_expected.to be_valid }
else
@@ -126,7 +145,7 @@ RSpec.describe AlertManagement::Alert do
end
context 'different project' do
- let!(:existing_alert) { create(:alert_management_alert, fingerprint: fingerprint) }
+ let_it_be(:existing_alert) { create(:alert_management_alert, fingerprint: fingerprint, project: project2) }
it { is_expected.to be_valid }
end
@@ -134,7 +153,11 @@ RSpec.describe AlertManagement::Alert do
end
describe 'hosts' do
- subject(:alert) { build(:alert_management_alert, hosts: hosts) }
+ subject(:alert) { triggered_alert }
+
+ before do
+ triggered_alert.hosts = hosts
+ end
context 'over 255 total chars' do
let(:hosts) { ['111.111.111.111'] * 18 }
@@ -159,13 +182,8 @@ RSpec.describe AlertManagement::Alert do
end
describe 'scopes' do
- let_it_be(:project) { create(:project) }
- let_it_be(:triggered_alert) { create(:alert_management_alert, project: project) }
- let_it_be(:resolved_alert) { create(:alert_management_alert, :resolved, project: project) }
- let_it_be(:ignored_alert) { create(:alert_management_alert, :ignored, project: project) }
-
describe '.for_iid' do
- subject { AlertManagement::Alert.for_iid(triggered_alert.iid) }
+ subject { project.alert_management_alerts.for_iid(triggered_alert.iid) }
it { is_expected.to match_array(triggered_alert) }
end
@@ -185,30 +203,51 @@ RSpec.describe AlertManagement::Alert do
end
describe '.for_fingerprint' do
- let_it_be(:fingerprint) { SecureRandom.hex }
- let_it_be(:alert_with_fingerprint) { create(:alert_management_alert, project: project, fingerprint: fingerprint) }
- let_it_be(:unrelated_alert_with_finger_print) { create(:alert_management_alert, fingerprint: fingerprint) }
+ let(:fingerprint) { SecureRandom.hex }
+ let(:alert_with_fingerprint) { triggered_alert }
+ let(:unrelated_alert_with_finger_print) { resolved_alert }
subject { described_class.for_fingerprint(project, fingerprint) }
+ before do
+ alert_with_fingerprint.update!(fingerprint: fingerprint)
+ unrelated_alert_with_finger_print.update!(fingerprint: fingerprint)
+ end
+
it { is_expected.to contain_exactly(alert_with_fingerprint) }
end
describe '.for_environment' do
let(:environment) { create(:environment, project: project) }
- let!(:env_alert) { create(:alert_management_alert, project: project, environment: environment) }
+ let(:env_alert) { triggered_alert }
subject { described_class.for_environment(environment) }
+ before do
+ triggered_alert.update!(environment: environment)
+ end
+
it { is_expected.to match_array(env_alert) }
end
+ describe '.order_severity_with_open_prometheus_alert' do
+ subject { described_class.where(project: alert_project).order_severity_with_open_prometheus_alert }
+
+ let_it_be(:alert_project) { create(:project) }
+ let_it_be(:resolved_critical_alert) { create(:alert_management_alert, :resolved, :critical, project: alert_project) }
+ let_it_be(:triggered_critical_alert) { create(:alert_management_alert, :triggered, :critical, project: alert_project) }
+ let_it_be(:triggered_high_alert) { create(:alert_management_alert, :triggered, :high, project: alert_project) }
+
+ it { is_expected.to eq([triggered_critical_alert, triggered_high_alert]) }
+ end
+
describe '.counts_by_status' do
subject { described_class.counts_by_status }
it do
is_expected.to eq(
triggered_alert.status => 1,
+ acknowledged_alert.status => 1,
resolved_alert.status => 1,
ignored_alert.status => 1
)
@@ -218,12 +257,10 @@ RSpec.describe AlertManagement::Alert do
describe '.counts_by_project_id' do
subject { described_class.counts_by_project_id }
- let!(:alert_other_project) { create(:alert_management_alert) }
-
it do
is_expected.to eq(
- project.id => 3,
- alert_other_project.project.id => 1
+ project.id => 2,
+ project2.id => 2
)
end
end
@@ -231,16 +268,12 @@ RSpec.describe AlertManagement::Alert do
describe '.open' do
subject { described_class.open }
- let!(:acknowledged_alert) { create(:alert_management_alert, :acknowledged, project: project)}
-
it { is_expected.to contain_exactly(acknowledged_alert, triggered_alert) }
end
describe '.not_resolved' do
subject { described_class.not_resolved }
- let!(:acknowledged_alert) { create(:alert_management_alert, :acknowledged, project: project) }
-
it { is_expected.to contain_exactly(acknowledged_alert, triggered_alert, ignored_alert) }
end
end
@@ -248,27 +281,22 @@ RSpec.describe AlertManagement::Alert do
describe '.last_prometheus_alert_by_project_id' do
subject { described_class.last_prometheus_alert_by_project_id }
- let(:project_1) { create(:project) }
- let!(:alert_1) { create(:alert_management_alert, project: project_1) }
- let!(:alert_2) { create(:alert_management_alert, project: project_1) }
+ let!(:p1_alert_1) { triggered_alert }
+ let!(:p1_alert_2) { acknowledged_alert }
- let(:project_2) { create(:project) }
- let!(:alert_3) { create(:alert_management_alert, project: project_2) }
- let!(:alert_4) { create(:alert_management_alert, project: project_2) }
+ let!(:p2_alert_1) { resolved_alert }
+ let!(:p2_alert_2) { ignored_alert }
it 'returns the latest alert for each project' do
- expect(subject).to contain_exactly(alert_2, alert_4)
+ expect(subject).to contain_exactly(p1_alert_2, p2_alert_2)
end
end
describe '.search' do
- let_it_be(:alert) do
- create(:alert_management_alert,
- title: 'Title',
- description: 'Desc',
- service: 'Service',
- monitoring_tool: 'Monitor'
- )
+ let(:alert) { triggered_alert }
+
+ before do
+ alert.update!(title: 'Title', description: 'Desc', service: 'Service', monitoring_tool: 'Monitor')
end
subject { AlertManagement::Alert.search(query) }
@@ -318,7 +346,8 @@ RSpec.describe AlertManagement::Alert do
}
}
end
- let(:alert) { build(:alert_management_alert, title: 'Details title', payload: payload) }
+
+ let(:alert) { build(:alert_management_alert, project: project, title: 'Details title', payload: payload) }
subject { alert.details }
@@ -331,16 +360,14 @@ RSpec.describe AlertManagement::Alert do
end
describe '#to_reference' do
- let(:alert) { build(:alert_management_alert) }
-
- it { expect(alert.to_reference).to eq('') }
+ it { expect(triggered_alert.to_reference).to eq('') }
end
describe '#trigger' do
subject { alert.trigger }
context 'when alert is in triggered state' do
- let(:alert) { create(:alert_management_alert) }
+ let(:alert) { triggered_alert }
it 'does not change the alert status' do
expect { subject }.not_to change { alert.reload.status }
@@ -348,7 +375,7 @@ RSpec.describe AlertManagement::Alert do
end
context 'when alert not in triggered state' do
- let(:alert) { create(:alert_management_alert, :resolved) }
+ let(:alert) { resolved_alert }
it 'changes the alert status to triggered' do
expect { subject }.to change { alert.triggered? }.to(true)
@@ -363,7 +390,7 @@ RSpec.describe AlertManagement::Alert do
describe '#acknowledge' do
subject { alert.acknowledge }
- let(:alert) { create(:alert_management_alert, :resolved) }
+ let(:alert) { resolved_alert }
it 'changes the alert status to acknowledged' do
expect { subject }.to change { alert.acknowledged? }.to(true)
@@ -383,15 +410,15 @@ RSpec.describe AlertManagement::Alert do
end
context 'when alert already resolved' do
- let(:alert) { create(:alert_management_alert, :resolved) }
+ let(:alert) { resolved_alert }
it 'does not change the alert status' do
- expect { subject }.not_to change { alert.reload.status }
+ expect { subject }.not_to change { resolved_alert.reload.status }
end
end
context 'when alert is not resolved' do
- let(:alert) { create(:alert_management_alert) }
+ let(:alert) { triggered_alert }
it 'changes alert status to "resolved"' do
expect { subject }.to change { alert.resolved? }.to(true)
@@ -402,7 +429,7 @@ RSpec.describe AlertManagement::Alert do
describe '#ignore' do
subject { alert.ignore }
- let(:alert) { create(:alert_management_alert, :resolved) }
+ let(:alert) { resolved_alert }
it 'changes the alert status to ignored' do
expect { subject }.to change { alert.ignored? }.to(true)
@@ -416,7 +443,7 @@ RSpec.describe AlertManagement::Alert do
describe '#register_new_event!' do
subject { alert.register_new_event! }
- let(:alert) { create(:alert_management_alert) }
+ let(:alert) { triggered_alert }
it 'increments the events count by 1' do
expect { subject }.to change { alert.events }.by(1)
@@ -425,7 +452,7 @@ RSpec.describe AlertManagement::Alert do
describe '#present' do
context 'when alert is generic' do
- let(:alert) { build(:alert_management_alert) }
+ let(:alert) { triggered_alert }
it 'uses generic alert presenter' do
expect(alert.present).to be_kind_of(AlertManagement::AlertPresenter)
@@ -433,7 +460,7 @@ RSpec.describe AlertManagement::Alert do
end
context 'when alert is Prometheus specific' do
- let(:alert) { build(:alert_management_alert, :prometheus) }
+ let(:alert) { build(:alert_management_alert, :prometheus, project: project) }
it 'uses Prometheus Alert presenter' do
expect(alert.present).to be_kind_of(AlertManagement::PrometheusAlertPresenter)
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index 2e024011553..4675f037957 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Analytics::CycleAnalytics::ProjectStage do
context 'relative positioning' do
it_behaves_like 'a class that supports relative positioning' do
- let(:project) { build(:project) }
+ let_it_be(:project) { create(:project) }
let(:factory) { :cycle_analytics_project_stage }
let(:default_params) { { project: project } }
end
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index cc314d9077d..d9ab326505b 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -38,6 +38,21 @@ RSpec.describe ApplicationRecord do
expect { Suggestion.safe_find_or_create_by(build(:suggestion).attributes) }
.to change { Suggestion.count }.by(1)
end
+
+ it 'passes a block to find_or_create_by' do
+ attributes = build(:suggestion).attributes
+
+ expect do |block|
+ Suggestion.safe_find_or_create_by(attributes, &block)
+ end.to yield_with_args(an_object_having_attributes(attributes))
+ end
+
+ it 'does not create a record when the attributes are not valid' do
+ raw_usage_data = RawUsageData.safe_find_or_create_by({ recorded_at: nil })
+
+ expect(raw_usage_data.id).to be_nil
+ expect(raw_usage_data).not_to be_valid
+ end
end
describe '.safe_find_or_create_by!' do
@@ -51,6 +66,14 @@ RSpec.describe ApplicationRecord do
it 'raises a validation error if the record was not persisted' do
expect { Suggestion.find_or_create_by!(note: nil) }.to raise_error(ActiveRecord::RecordInvalid)
end
+
+ it 'passes a block to find_or_create_by' do
+ attributes = build(:suggestion).attributes
+
+ expect do |block|
+ Suggestion.safe_find_or_create_by!(attributes, &block)
+ end.to yield_with_args(an_object_having_attributes(attributes))
+ end
end
describe '.underscore' do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 5723b0d0729..bcd8eccd68f 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -72,6 +72,7 @@ RSpec.describe ApplicationSetting do
it { is_expected.not_to allow_value(nil).for(:push_event_activities_limit) }
it { is_expected.to validate_numericality_of(:snippet_size_limit).only_integer.is_greater_than(0) }
+ it { is_expected.to validate_numericality_of(:wiki_page_max_content_bytes).only_integer.is_greater_than_or_equal_to(1024) }
it { is_expected.to validate_presence_of(:max_artifacts_size) }
it { is_expected.to validate_numericality_of(:max_artifacts_size).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_pages_size) }
@@ -86,11 +87,6 @@ RSpec.describe ApplicationSetting do
it { is_expected.not_to allow_value('abc').for(:minimum_password_length) }
it { is_expected.to allow_value(10).for(:minimum_password_length) }
- it { is_expected.to allow_value(0).for(:namespace_storage_size_limit) }
- it { is_expected.to allow_value(1).for(:namespace_storage_size_limit) }
- it { is_expected.not_to allow_value(nil).for(:namespace_storage_size_limit) }
- it { is_expected.not_to allow_value(-1).for(:namespace_storage_size_limit) }
-
it { is_expected.to allow_value(300).for(:issues_create_limit) }
it { is_expected.not_to allow_value('three').for(:issues_create_limit) }
it { is_expected.not_to allow_value(nil).for(:issues_create_limit) }
diff --git a/spec/models/audit_event_partitioned_spec.rb b/spec/models/audit_event_partitioned_spec.rb
new file mode 100644
index 00000000000..fe69f0083b7
--- /dev/null
+++ b/spec/models/audit_event_partitioned_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuditEventPartitioned do
+ let(:source_table) { AuditEvent }
+ let(:partitioned_table) { described_class }
+
+ it 'has the same columns as the source table' do
+ expect(partitioned_table.column_names).to match_array(source_table.column_names)
+ end
+
+ it 'has the same null constraints as the source table' do
+ constraints_from_source_table = null_constraints(source_table)
+ constraints_from_partitioned_table = null_constraints(partitioned_table)
+
+ expect(constraints_from_partitioned_table.to_a).to match_array(constraints_from_source_table.to_a)
+ end
+
+ it 'inserts the same record as the one in the source table', :aggregate_failures do
+ expect { create(:audit_event) }.to change { partitioned_table.count }.by(1)
+
+ event_from_source_table = source_table.connection.select_one(
+ "SELECT * FROM #{source_table.table_name} ORDER BY created_at desc LIMIT 1"
+ )
+ event_from_partitioned_table = partitioned_table.connection.select_one(
+ "SELECT * FROM #{partitioned_table.table_name} ORDER BY created_at desc LIMIT 1"
+ )
+
+ expect(event_from_partitioned_table).to eq(event_from_source_table)
+ end
+
+ def null_constraints(table)
+ table.connection.select_all(<<~SQL)
+ SELECT c.column_name, c.is_nullable
+ FROM information_schema.columns c
+ WHERE c.table_name = '#{table.table_name}'
+ AND c.column_name != 'created_at'
+ SQL
+ end
+end
diff --git a/spec/models/audit_event_spec.rb b/spec/models/audit_event_spec.rb
new file mode 100644
index 00000000000..a1ed48c57f4
--- /dev/null
+++ b/spec/models/audit_event_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuditEvent do
+ let_it_be(:audit_event) { create(:project_audit_event) }
+ subject { audit_event }
+
+ describe '#as_json' do
+ context 'ip_address' do
+ subject { build(:group_audit_event, ip_address: '192.168.1.1').as_json }
+
+ it 'overrides the ip_address with its string value' do
+ expect(subject['ip_address']).to eq('192.168.1.1')
+ end
+ end
+ end
+end
diff --git a/spec/models/blob_viewer/composer_json_spec.rb b/spec/models/blob_viewer/composer_json_spec.rb
index 8d66e9e951f..5af58f3d6c7 100644
--- a/spec/models/blob_viewer/composer_json_spec.rb
+++ b/spec/models/blob_viewer/composer_json_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe BlobViewer::ComposerJson do
}
SPEC
end
+
let(:blob) { fake_blob(path: 'composer.json', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/blob_viewer/gemspec_spec.rb b/spec/models/blob_viewer/gemspec_spec.rb
index b6f3e059c7e..43c63050c90 100644
--- a/spec/models/blob_viewer/gemspec_spec.rb
+++ b/spec/models/blob_viewer/gemspec_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe BlobViewer::Gemspec do
end
SPEC
end
+
let(:blob) { fake_blob(path: 'activerecord.gemspec', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/blob_viewer/go_mod_spec.rb b/spec/models/blob_viewer/go_mod_spec.rb
index 21e84d39a54..3249e86fb03 100644
--- a/spec/models/blob_viewer/go_mod_spec.rb
+++ b/spec/models/blob_viewer/go_mod_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe BlobViewer::GoMod do
module #{Settings.build_gitlab_go_url}/#{project.full_path}
SPEC
end
+
let(:blob) { fake_blob(path: 'go.mod', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/blob_viewer/package_json_spec.rb b/spec/models/blob_viewer/package_json_spec.rb
index d2e8ab6575f..8a394a7334f 100644
--- a/spec/models/blob_viewer/package_json_spec.rb
+++ b/spec/models/blob_viewer/package_json_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe BlobViewer::PackageJson do
}
SPEC
end
+
let(:blob) { fake_blob(path: 'package.json', data: data) }
subject { described_class.new(blob) }
@@ -54,6 +55,7 @@ RSpec.describe BlobViewer::PackageJson do
}
SPEC
end
+
let(:blob) { fake_blob(path: 'package.json', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/blob_viewer/podspec_json_spec.rb b/spec/models/blob_viewer/podspec_json_spec.rb
index 61d2602c413..cdeea4e8744 100644
--- a/spec/models/blob_viewer/podspec_json_spec.rb
+++ b/spec/models/blob_viewer/podspec_json_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe BlobViewer::PodspecJson do
}
SPEC
end
+
let(:blob) { fake_blob(path: 'AFNetworking.podspec.json', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/blob_viewer/podspec_spec.rb b/spec/models/blob_viewer/podspec_spec.rb
index 0a0fbcaebd4..c2828067f22 100644
--- a/spec/models/blob_viewer/podspec_spec.rb
+++ b/spec/models/blob_viewer/podspec_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe BlobViewer::Podspec do
end
SPEC
end
+
let(:blob) { fake_blob(path: 'Reachability.podspec', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/blob_viewer/route_map_spec.rb b/spec/models/blob_viewer/route_map_spec.rb
index bb0284d7868..c412afaac4e 100644
--- a/spec/models/blob_viewer/route_map_spec.rb
+++ b/spec/models/blob_viewer/route_map_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe BlobViewer::RouteMap do
public: 'team/'
MAP
end
+
let(:blob) { fake_blob(path: '.gitlab/route-map.yml', data: data) }
subject { described_class.new(blob) }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 857b238981b..069ac23c5a4 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -612,6 +612,62 @@ RSpec.describe Ci::Build do
end
end
+ describe '#available_artifacts?' do
+ let(:build) { create(:ci_build) }
+
+ subject { build.available_artifacts? }
+
+ context 'when artifacts are not expired' do
+ before do
+ build.artifacts_expire_at = Date.tomorrow
+ end
+
+ context 'when artifacts exist' do
+ before do
+ create(:ci_job_artifact, :archive, job: build)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when artifacts do not exist' do
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when artifacts are expired' do
+ before do
+ build.artifacts_expire_at = Date.yesterday
+ end
+
+ context 'when artifacts are not locked' do
+ before do
+ build.pipeline.locked = :unlocked
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when artifacts are locked' do
+ before do
+ build.pipeline.locked = :artifacts_locked
+ end
+
+ context 'when artifacts exist' do
+ before do
+ create(:ci_job_artifact, :archive, job: build)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when artifacts do not exist' do
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+ end
+
describe '#browsable_artifacts?' do
subject { build.browsable_artifacts? }
@@ -1195,18 +1251,6 @@ RSpec.describe Ci::Build do
is_expected.to eq('review/host')
end
-
- context 'when ci_persisted_expanded_environment_name feature flag is disabled' do
- before do
- stub_feature_flags(ci_persisted_expanded_environment_name: false)
- end
-
- it 'returns an expanded environment name with a list of variables' do
- expect(build).to receive(:simple_variables).once.and_call_original
-
- is_expected.to eq('review/host')
- end
- end
end
end
@@ -1703,112 +1747,6 @@ RSpec.describe Ci::Build do
end
end
end
-
- describe '#options_retry_max' do
- context 'with retries max config option' do
- subject { create(:ci_build, options: { retry: { max: 1 } }) }
-
- context 'when build_metadata_config is set' do
- before do
- stub_feature_flags(ci_build_metadata_config: true)
- end
-
- it 'returns the number of configured max retries' do
- expect(subject.options_retry_max).to eq 1
- end
- end
-
- context 'when build_metadata_config is not set' do
- before do
- stub_feature_flags(ci_build_metadata_config: false)
- end
-
- it 'returns the number of configured max retries' do
- expect(subject.options_retry_max).to eq 1
- end
- end
- end
-
- context 'without retries max config option' do
- subject { create(:ci_build) }
-
- it 'returns nil' do
- expect(subject.options_retry_max).to be_nil
- end
- end
-
- context 'when build is degenerated' do
- subject { create(:ci_build, :degenerated) }
-
- it 'returns nil' do
- expect(subject.options_retry_max).to be_nil
- end
- end
-
- context 'with integer only config option' do
- subject { create(:ci_build, options: { retry: 1 }) }
-
- it 'returns the number of configured max retries' do
- expect(subject.options_retry_max).to eq 1
- end
- end
- end
-
- describe '#options_retry_when' do
- context 'with retries when config option' do
- subject { create(:ci_build, options: { retry: { when: ['some_reason'] } }) }
-
- it 'returns the configured when' do
- expect(subject.options_retry_when).to eq ['some_reason']
- end
- end
-
- context 'without retries when config option' do
- subject { create(:ci_build) }
-
- it 'returns always array' do
- expect(subject.options_retry_when).to eq ['always']
- end
- end
-
- context 'with integer only config option' do
- subject { create(:ci_build, options: { retry: 1 }) }
-
- it 'returns always array' do
- expect(subject.options_retry_when).to eq ['always']
- end
- end
- end
-
- describe '#retry_failure?' do
- using RSpec::Parameterized::TableSyntax
-
- let(:build) { create(:ci_build) }
-
- subject { build.retry_failure? }
-
- where(:description, :retry_count, :options, :failure_reason, :result) do
- "retries are disabled" | 0 | { max: 0 } | nil | false
- "max equals count" | 2 | { max: 2 } | nil | false
- "max is higher than count" | 1 | { max: 2 } | nil | true
- "matching failure reason" | 0 | { when: %w[api_failure], max: 2 } | :api_failure | true
- "not matching with always" | 0 | { when: %w[always], max: 2 } | :api_failure | true
- "not matching reason" | 0 | { when: %w[script_error], max: 2 } | :api_failure | false
- "scheduler failure override" | 1 | { when: %w[scheduler_failure], max: 1 } | :scheduler_failure | false
- "default for scheduler failure" | 1 | {} | :scheduler_failure | true
- end
-
- with_them do
- before do
- allow(build).to receive(:retries_count) { retry_count }
-
- build.options[:retry] = options
- build.failure_reason = failure_reason
- end
-
- it { is_expected.to eq(result) }
- end
- end
end
describe '.keep_artifacts!' do
@@ -2115,23 +2053,13 @@ RSpec.describe Ci::Build do
it { is_expected.to be_nil }
end
- context 'when build has a start environment' do
- let(:build) { create(:ci_build, :with_deployment, :deploy_to_production, pipeline: pipeline) }
-
- it 'does not expand environment name' do
- expect(build).not_to receive(:expanded_environment_name)
-
- subject
- end
- end
-
context 'when build has a stop environment' do
- let(:build) { create(:ci_build, :stop_review_app, pipeline: pipeline) }
+ let(:build) { create(:ci_build, :stop_review_app, pipeline: pipeline, environment: "foo-#{project.default_branch}") }
it 'expands environment name' do
- expect(build).to receive(:expanded_environment_name)
+ expect(build).to receive(:expanded_environment_name).and_call_original
- subject
+ is_expected.to eq(environment)
end
end
end
@@ -2925,6 +2853,7 @@ RSpec.describe Ci::Build do
let(:ci_registry) do
{ key: 'CI_REGISTRY', value: 'registry.example.com', public: true, masked: false }
end
+
let(:ci_registry_image) do
{ key: 'CI_REGISTRY_IMAGE', value: project.container_registry_url, public: true, masked: false }
end
@@ -3007,25 +2936,46 @@ RSpec.describe Ci::Build do
end
context 'when build is parallelized' do
- let(:total) { 5 }
- let(:index) { 3 }
+ shared_examples 'parallelized jobs config' do
+ let(:index) { 3 }
+ let(:total) { 5 }
- before do
- build.options[:parallel] = total
- build.options[:instance] = index
- build.name = "#{build.name} #{index}/#{total}"
+ before do
+ build.options[:parallel] = config
+ build.options[:instance] = index
+ end
+
+ it 'includes CI_NODE_INDEX' do
+ is_expected.to include(
+ { key: 'CI_NODE_INDEX', value: index.to_s, public: true, masked: false }
+ )
+ end
+
+ it 'includes correct CI_NODE_TOTAL' do
+ is_expected.to include(
+ { key: 'CI_NODE_TOTAL', value: total.to_s, public: true, masked: false }
+ )
+ end
end
- it 'includes CI_NODE_INDEX' do
- is_expected.to include(
- { key: 'CI_NODE_INDEX', value: index.to_s, public: true, masked: false }
- )
+ context 'when parallel is a number' do
+ let(:config) { 5 }
+
+ it_behaves_like 'parallelized jobs config'
end
- it 'includes correct CI_NODE_TOTAL' do
- is_expected.to include(
- { key: 'CI_NODE_TOTAL', value: total.to_s, public: true, masked: false }
- )
+ context 'when parallel is a hash with the total key' do
+ let(:config) { { total: 5 } }
+
+ it_behaves_like 'parallelized jobs config'
+ end
+
+ context 'when parallel is nil' do
+ let(:config) {}
+
+ it_behaves_like 'parallelized jobs config' do
+ let(:total) { 1 }
+ end
end
end
@@ -3161,6 +3111,14 @@ RSpec.describe Ci::Build do
end
end
+ describe '#simple_variables_without_dependencies' do
+ it 'does not load dependencies' do
+ expect(build).not_to receive(:dependency_variables)
+
+ build.simple_variables_without_dependencies
+ end
+ end
+
shared_examples "secret CI variables" do
context 'when ref is branch' do
let(:build) { create(:ci_build, ref: 'master', tag: false, project: project) }
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index dab523f67ab..a6362d46449 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -262,6 +262,12 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.data).to be_empty
end
+ it 'does not read data when appending' do
+ expect(build_trace_chunk).not_to receive(:data)
+
+ build_trace_chunk.append(new_data, offset)
+ end
+
it_behaves_like 'Appending correctly'
it_behaves_like 'Scheduling sidekiq worker to flush data to persist store'
end
@@ -486,7 +492,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.redis?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
- expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
subject
@@ -508,7 +514,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.redis?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to eq(data)
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to be_nil
- expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
end
end
end
@@ -535,7 +541,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.database?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
- expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
subject
@@ -557,7 +563,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
expect(build_trace_chunk.database?).to be_truthy
expect(Ci::BuildTraceChunks::Redis.new.data(build_trace_chunk)).to be_nil
expect(Ci::BuildTraceChunks::Database.new.data(build_trace_chunk)).to eq(data)
- expect { Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk) }.to raise_error(Excon::Error::NotFound)
+ expect(Ci::BuildTraceChunks::Fog.new.data(build_trace_chunk)).to be_nil
end
end
end
diff --git a/spec/models/ci/build_trace_chunks/database_spec.rb b/spec/models/ci/build_trace_chunks/database_spec.rb
index 245625b8046..313328ac037 100644
--- a/spec/models/ci/build_trace_chunks/database_spec.rb
+++ b/spec/models/ci/build_trace_chunks/database_spec.rb
@@ -89,6 +89,24 @@ RSpec.describe Ci::BuildTraceChunks::Database do
end
end
+ describe '#size' do
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :database_with_data, initial_data: 'üabcdef') }
+
+ it 'returns data bytesize correctly' do
+ expect(data_store.size(model)).to eq 8
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :database_without_data) }
+
+ it 'returns zero' do
+ expect(data_store.size(model)).to be_zero
+ end
+ end
+ end
+
describe '#keys' do
subject { data_store.keys(relation) }
diff --git a/spec/models/ci/build_trace_chunks/fog_spec.rb b/spec/models/ci/build_trace_chunks/fog_spec.rb
index 7ef3018d87b..a44ae58dfd2 100644
--- a/spec/models/ci/build_trace_chunks/fog_spec.rb
+++ b/spec/models/ci/build_trace_chunks/fog_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'returns nil' do
- expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ expect(data_store.data(model)).to be_nil
end
end
end
@@ -66,7 +66,7 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'sets new data' do
- expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ expect(data_store.data(model)).to be_nil
subject
@@ -86,7 +86,7 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
subject
- expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ expect(data_store.data(model)).to be_nil
end
end
@@ -94,11 +94,29 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'does nothing' do
- expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ expect(data_store.data(model)).to be_nil
subject
- expect { data_store.data(model) }.to raise_error(Excon::Error::NotFound)
+ expect(data_store.data(model)).to be_nil
+ end
+ end
+ end
+
+ describe '#size' do
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'üabcd') }
+
+ it 'returns data bytesize correctly' do
+ expect(data_store.size(model)).to eq 6
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
+
+ it 'returns zero' do
+ expect(data_store.size(model)).to be_zero
end
end
end
diff --git a/spec/models/ci/build_trace_chunks/redis_spec.rb b/spec/models/ci/build_trace_chunks/redis_spec.rb
index c37b8697a4d..cb0b6baadeb 100644
--- a/spec/models/ci/build_trace_chunks/redis_spec.rb
+++ b/spec/models/ci/build_trace_chunks/redis_spec.rb
@@ -61,6 +61,86 @@ RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
end
end
+ describe '#append_data' do
+ context 'when valid offset is used with existing data' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'abcd') }
+
+ it 'appends data' do
+ expect(data_store.data(model)).to eq('abcd')
+
+ length = data_store.append_data(model, '12345', 4)
+
+ expect(length).to eq 9
+ expect(data_store.data(model)).to eq('abcd12345')
+ end
+ end
+
+ context 'when data does not exist yet' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
+
+ it 'sets new data' do
+ expect(data_store.data(model)).to be_nil
+
+ length = data_store.append_data(model, 'abc', 0)
+
+ expect(length).to eq 3
+ expect(data_store.data(model)).to eq('abc')
+ end
+ end
+
+ context 'when data needs to be truncated' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: '12345678') }
+
+ it 'appends data and truncates stored value' do
+ expect(data_store.data(model)).to eq('12345678')
+
+ length = data_store.append_data(model, 'ab', 4)
+
+ expect(length).to eq 6
+ expect(data_store.data(model)).to eq('1234ab')
+ end
+ end
+
+ context 'when invalid offset is provided' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'abc') }
+
+ it 'returns a negative length' do
+ length = data_store.append_data(model, '12345', 4)
+
+ expect(length).to be_negative
+ end
+ end
+
+ context 'when trace contains multi-byte UTF8 characters' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'aüc') }
+
+ it 'appends data' do
+ length = data_store.append_data(model, '1234', 4)
+
+ data_store.data(model).then do |new_data|
+ expect(new_data.bytesize).to eq 8
+ expect(new_data).to eq 'aüc1234'
+ end
+
+ expect(length).to eq 8
+ end
+ end
+
+ context 'when trace contains non-UTF8 characters' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: "a\255c") }
+
+ it 'appends data' do
+ length = data_store.append_data(model, '1234', 3)
+
+ data_store.data(model).then do |new_data|
+ expect(new_data.bytesize).to eq 7
+ end
+
+ expect(length).to eq 7
+ end
+ end
+ end
+
describe '#delete_data' do
subject { data_store.delete_data(model) }
@@ -89,6 +169,24 @@ RSpec.describe Ci::BuildTraceChunks::Redis, :clean_gitlab_redis_shared_state do
end
end
+ describe '#size' do
+ context 'when data exists' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_with_data, initial_data: 'üabcd') }
+
+ it 'returns data bytesize correctly' do
+ expect(data_store.size(model)).to eq 6
+ end
+ end
+
+ context 'when data does not exist' do
+ let(:model) { create(:ci_build_trace_chunk, :redis_without_data) }
+
+ it 'returns zero' do
+ expect(data_store.size(model)).to be_zero
+ end
+ end
+ end
+
describe '#keys' do
subject { data_store.keys(relation) }
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index 059a5b76b9a..326366666cb 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
data: { coverage: 71.2 }
)
end
+
let!(:new_pipeline) { create(:ci_pipeline) }
it 'creates or updates matching report results' do
diff --git a/spec/models/ci/group_spec.rb b/spec/models/ci/group_spec.rb
index dc9aee906ea..c20b7e61044 100644
--- a/spec/models/ci/group_spec.rb
+++ b/spec/models/ci/group_spec.rb
@@ -29,24 +29,8 @@ RSpec.describe Ci::Group do
[create(:ci_build, :failed)]
end
- context 'when ci_composite_status is enabled' do
- before do
- stub_feature_flags(ci_composite_status: true)
- end
-
- it 'returns a failed status' do
- expect(subject.status).to eq('failed')
- end
- end
-
- context 'when ci_composite_status is disabled' do
- before do
- stub_feature_flags(ci_composite_status: false)
- end
-
- it 'returns a failed status' do
- expect(subject.status).to eq('failed')
- end
+ it 'returns a failed status' do
+ expect(subject.status).to eq('failed')
end
end
diff --git a/spec/models/ci/instance_variable_spec.rb b/spec/models/ci/instance_variable_spec.rb
index 344ba5bfafd..15d0c911bc4 100644
--- a/spec/models/ci/instance_variable_spec.rb
+++ b/spec/models/ci/instance_variable_spec.rb
@@ -9,12 +9,39 @@ RSpec.describe Ci::InstanceVariable do
it { is_expected.to include_module(Ci::Maskable) }
it { is_expected.to validate_uniqueness_of(:key).with_message(/\(\w+\) has already been taken/) }
- it { is_expected.to validate_length_of(:encrypted_value).is_at_most(1024).with_message(/Variables over 700 characters risk exceeding the limit/) }
+ it { is_expected.to validate_length_of(:value).is_at_most(10_000).with_message(/The value of the provided variable exceeds the 10000 character limit/) }
it_behaves_like 'includes Limitable concern' do
subject { build(:ci_instance_variable) }
end
+ describe '#value' do
+ context 'without application limit' do
+ # Ensures breakage if encryption algorithm changes
+ let(:variable) { build(:ci_instance_variable, key: 'too_long', value: value) }
+
+ before do
+ allow(variable).to receive(:valid?).and_return(true)
+ end
+
+ context 'when value is over the limit' do
+ let(:value) { SecureRandom.alphanumeric(10_002) }
+
+ it 'raises a database level error' do
+ expect { variable.save }.to raise_error(ActiveRecord::StatementInvalid)
+ end
+ end
+
+ context 'when value is under the limit' do
+ let(:value) { SecureRandom.alphanumeric(10_000) }
+
+ it 'does not raise database level error' do
+ expect { variable.save }.not_to raise_error
+ end
+ end
+ end
+ end
+
describe '.unprotected' do
subject { described_class.unprotected }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index b5f9128b7c5..91a669aa3f4 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -483,11 +483,7 @@ RSpec.describe Ci::JobArtifact do
subject { create(:ci_job_artifact, :archive) }
context 'when existing object has local store' do
- it 'is stored locally' do
- expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
- expect(subject.file).to be_file_storage
- expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
- end
+ it_behaves_like 'mounted file in local store'
end
context 'when direct upload is enabled' do
@@ -496,11 +492,7 @@ RSpec.describe Ci::JobArtifact do
end
context 'when file is stored' do
- it 'is stored remotely' do
- expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
- expect(subject.file).not_to be_file_storage
- expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE)
- end
+ it_behaves_like 'mounted file in object store'
end
end
end
@@ -529,11 +521,9 @@ RSpec.describe Ci::JobArtifact do
context 'when file type is supported' do
let(:project_closest_setting) { 1024 }
let(:artifact_type) { 'junit' }
+ let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{artifact_type}" }
- before do
- stub_feature_flags(ci_max_artifact_size_per_type: flag_enabled)
- allow(build.project).to receive(:closest_setting).with(:max_artifacts_size).and_return(project_closest_setting)
- end
+ let!(:plan_limits) { create(:plan_limits, :default_plan) }
shared_examples_for 'basing off the project closest setting' do
it { is_expected.to eq(project_closest_setting.megabytes.to_i) }
@@ -543,49 +533,40 @@ RSpec.describe Ci::JobArtifact do
it { is_expected.to eq(max_size_for_type.megabytes.to_i) }
end
- context 'and feature flag for custom max size per type is enabled' do
- let(:flag_enabled) { true }
- let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{artifact_type}" }
-
- let!(:plan_limits) { create(:plan_limits, :default_plan) }
+ before do
+ allow(build.project).to receive(:closest_setting).with(:max_artifacts_size).and_return(project_closest_setting)
+ end
- context 'and plan limit is disabled for the given artifact type' do
- before do
- plan_limits.update!(limit_name => 0)
- end
+ context 'and plan limit is disabled for the given artifact type' do
+ before do
+ plan_limits.update!(limit_name => 0)
+ end
- it_behaves_like 'basing off the project closest setting'
+ it_behaves_like 'basing off the project closest setting'
- context 'and project closest setting results to zero' do
- let(:project_closest_setting) { 0 }
+ context 'and project closest setting results to zero' do
+ let(:project_closest_setting) { 0 }
- it { is_expected.to eq(0) }
- end
+ it { is_expected.to eq(0) }
end
+ end
- context 'and plan limit is enabled for the given artifact type' do
- before do
- plan_limits.update!(limit_name => max_size_for_type)
- end
-
- context 'and plan limit is smaller than project setting' do
- let(:max_size_for_type) { project_closest_setting - 1 }
-
- it_behaves_like 'basing off the plan limit'
- end
+ context 'and plan limit is enabled for the given artifact type' do
+ before do
+ plan_limits.update!(limit_name => max_size_for_type)
+ end
- context 'and plan limit is smaller than project setting' do
- let(:max_size_for_type) { project_closest_setting + 1 }
+ context 'and plan limit is smaller than project setting' do
+ let(:max_size_for_type) { project_closest_setting - 1 }
- it_behaves_like 'basing off the project closest setting'
- end
+ it_behaves_like 'basing off the plan limit'
end
- end
- context 'and feature flag for custom max size per type is disabled' do
- let(:flag_enabled) { false }
+ context 'and plan limit is larger than project setting' do
+ let(:max_size_for_type) { project_closest_setting + 1 }
- it_behaves_like 'basing off the project closest setting'
+ it_behaves_like 'basing off the project closest setting'
+ end
end
end
end
@@ -597,7 +578,8 @@ RSpec.describe Ci::JobArtifact do
Please refer to https://docs.gitlab.com/ee/development/application_limits.html on how to add new plan limit columns.
Take note that while existing max size plan limits default to 0, succeeding new limits are recommended to have
- non-zero default values.
+ non-zero default values. Also, remember to update the plan limits documentation (doc/administration/instance_limits.md)
+ when changes or new entries are made.
MSG
end
end
diff --git a/spec/models/ci/pipeline_artifact_spec.rb b/spec/models/ci/pipeline_artifact_spec.rb
new file mode 100644
index 00000000000..9d63d74a6cc
--- /dev/null
+++ b/spec/models/ci/pipeline_artifact_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineArtifact, type: :model do
+ let_it_be(:coverage_report) { create(:ci_pipeline_artifact) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:pipeline) }
+ it { is_expected.to belong_to(:project) }
+ end
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:pipeline) }
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:file_type) }
+ it { is_expected.to validate_presence_of(:file_format) }
+ it { is_expected.to validate_presence_of(:size) }
+ it { is_expected.to validate_presence_of(:file) }
+
+ context 'when attributes are valid' do
+ it 'returns no errors' do
+ expect(coverage_report).to be_valid
+ end
+ end
+
+ context 'when file_store is invalid' do
+ it 'returns errors' do
+ coverage_report.file_store = 0
+
+ expect(coverage_report).to be_invalid
+ expect(coverage_report.errors.full_messages).to eq(["File store is not included in the list"])
+ end
+ end
+
+ context 'when size is over 10 megabytes' do
+ it 'returns errors' do
+ coverage_report.size = 11.megabytes
+
+ expect(coverage_report).to be_invalid
+ end
+ end
+ end
+
+ describe '#set_size' do
+ subject { create(:ci_pipeline_artifact) }
+
+ context 'when file is being created' do
+ it 'sets the size' do
+ expect(subject.size).to eq(85)
+ end
+ end
+
+ context 'when file is being updated' do
+ it 'updates the size' do
+ subject.update!(file: fixture_file_upload('spec/fixtures/dk.png'))
+
+ expect(subject.size).to eq(1062)
+ end
+ end
+ end
+
+ describe 'file is being stored' do
+ subject { create(:ci_pipeline_artifact) }
+
+ context 'when existing object has local store' do
+ it_behaves_like 'mounted file in local store'
+ end
+
+ context 'when direct upload is enabled' do
+ before do
+ stub_artifacts_object_storage(Ci::PipelineArtifactUploader, direct_upload: true)
+ end
+
+ context 'when file is stored' do
+ it_behaves_like 'mounted file in object store'
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index ed2466d6413..b4e80fa7588 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -46,6 +46,7 @@ RSpec.describe Ci::Pipeline, :mailer do
it { is_expected.to respond_to :git_author_email }
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
+ it { is_expected.to have_many(:pipeline_artifacts) }
describe 'associations' do
it 'has a bidirectional relationship with projects' do
@@ -813,6 +814,8 @@ RSpec.describe Ci::Pipeline, :mailer do
expect(subject.to_hash)
.to include(
'CI_EXTERNAL_PULL_REQUEST_IID' => pull_request.pull_request_iid.to_s,
+ 'CI_EXTERNAL_PULL_REQUEST_SOURCE_REPOSITORY' => pull_request.source_repository,
+ 'CI_EXTERNAL_PULL_REQUEST_TARGET_REPOSITORY' => pull_request.target_repository,
'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA' => pull_request.source_sha,
'CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA' => pull_request.target_sha,
'CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME' => pull_request.source_branch,
@@ -936,69 +939,59 @@ RSpec.describe Ci::Pipeline, :mailer do
subject { pipeline.legacy_stages }
- where(:ci_composite_status) do
- [false, true]
+ context 'stages list' do
+ it 'returns ordered list of stages' do
+ expect(subject.map(&:name)).to eq(%w[build test deploy])
+ end
end
- with_them do
- before do
- stub_feature_flags(ci_composite_status: ci_composite_status)
+ context 'stages with statuses' do
+ let(:statuses) do
+ subject.map { |stage| [stage.name, stage.status] }
end
- context 'stages list' do
- it 'returns ordered list of stages' do
- expect(subject.map(&:name)).to eq(%w[build test deploy])
- end
+ it 'returns list of stages with correct statuses' do
+ expect(statuses).to eq([%w(build failed),
+ %w(test success),
+ %w(deploy running)])
end
- context 'stages with statuses' do
- let(:statuses) do
- subject.map { |stage| [stage.name, stage.status] }
+ context 'when commit status is retried' do
+ before do
+ create(:commit_status, pipeline: pipeline,
+ stage: 'build',
+ name: 'mac',
+ stage_idx: 0,
+ status: 'success')
+
+ Ci::ProcessPipelineService
+ .new(pipeline)
+ .execute
end
- it 'returns list of stages with correct statuses' do
- expect(statuses).to eq([%w(build failed),
+ it 'ignores the previous state' do
+ expect(statuses).to eq([%w(build success),
%w(test success),
%w(deploy running)])
end
-
- context 'when commit status is retried' do
- before do
- create(:commit_status, pipeline: pipeline,
- stage: 'build',
- name: 'mac',
- stage_idx: 0,
- status: 'success')
-
- Ci::ProcessPipelineService
- .new(pipeline)
- .execute
- end
-
- it 'ignores the previous state' do
- expect(statuses).to eq([%w(build success),
- %w(test success),
- %w(deploy running)])
- end
- end
end
+ end
- context 'when there is a stage with warnings' do
- before do
- create(:commit_status, pipeline: pipeline,
- stage: 'deploy',
- name: 'prod:2',
- stage_idx: 2,
- status: 'failed',
- allow_failure: true)
- end
+ context 'when there is a stage with warnings' do
+ before do
+ create(:commit_status, pipeline: pipeline,
+ stage: 'deploy',
+ name: 'prod:2',
+ stage_idx: 2,
+ status: 'failed',
+ allow_failure: true)
+ end
- it 'populates stage with correct number of warnings' do
- deploy_stage = pipeline.legacy_stages.third
+ it 'populates stage with correct number of warnings' do
+ deploy_stage = pipeline.legacy_stages.third
- expect(deploy_stage).not_to receive(:statuses)
- expect(deploy_stage).to have_warnings
- end
+ expect(deploy_stage).not_to receive(:statuses)
+ expect(deploy_stage).to have_warnings
end
end
end
@@ -1044,19 +1037,6 @@ RSpec.describe Ci::Pipeline, :mailer do
before do
create(:ci_stage_entity, project: project,
pipeline: pipeline,
- name: 'build')
- end
-
- it 'returns persisted stages' do
- expect(pipeline.stages).not_to be_empty
- expect(pipeline.stages).to all(be_persisted)
- end
- end
-
- describe '#ordered_stages' do
- before do
- create(:ci_stage_entity, project: project,
- pipeline: pipeline,
position: 4,
name: 'deploy')
@@ -1083,60 +1063,25 @@ RSpec.describe Ci::Pipeline, :mailer do
name: 'cleanup')
end
- subject { pipeline.ordered_stages }
+ subject { pipeline.stages }
- context 'when using atomic processing' do
- before do
- stub_feature_flags(
- ci_atomic_processing: true
- )
- end
-
- context 'when pipelines is not complete' do
- it 'returns stages in valid order' do
- expect(subject).to all(be_a Ci::Stage)
- expect(subject.map(&:name))
- .to eq %w[sanity build test deploy cleanup]
- end
- end
-
- context 'when pipeline is complete' do
- before do
- pipeline.succeed!
- end
-
- it 'returns stages in valid order' do
- expect(subject).to all(be_a Ci::Stage)
- expect(subject.map(&:name))
- .to eq %w[sanity build test deploy cleanup]
- end
+ context 'when pipeline is not complete' do
+ it 'returns stages in valid order' do
+ expect(subject).to all(be_a Ci::Stage)
+ expect(subject.map(&:name))
+ .to eq %w[sanity build test deploy cleanup]
end
end
- context 'when using persisted stages' do
+ context 'when pipeline is complete' do
before do
- stub_feature_flags(
- ci_atomic_processing: false
- )
+ pipeline.succeed!
end
- context 'when pipelines is not complete' do
- it 'still returns legacy stages' do
- expect(subject).to all(be_a Ci::LegacyStage)
- expect(subject.map(&:name)).to eq %w[build test]
- end
- end
-
- context 'when pipeline is complete' do
- before do
- pipeline.succeed!
- end
-
- it 'returns stages in valid order' do
- expect(subject).to all(be_a Ci::Stage)
- expect(subject.map(&:name))
- .to eq %w[sanity build test deploy cleanup]
- end
+ it 'returns stages in valid order' do
+ expect(subject).to all(be_a Ci::Stage)
+ expect(subject.map(&:name))
+ .to eq %w[sanity build test deploy cleanup]
end
end
end
@@ -1932,6 +1877,7 @@ RSpec.describe Ci::Pipeline, :mailer do
project: project
)
end
+
let!(:commit_123_ref_develop) do
create(
:ci_empty_pipeline,
@@ -1941,6 +1887,7 @@ RSpec.describe Ci::Pipeline, :mailer do
project: project
)
end
+
let!(:commit_456_ref_test) do
create(
:ci_empty_pipeline,
@@ -2139,58 +2086,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
end
- describe '#update_status' do
- context 'when pipeline is empty' do
- it 'updates does not change pipeline status' do
- expect(pipeline.statuses.latest.slow_composite_status(project: project)).to be_nil
-
- expect { pipeline.update_legacy_status }
- .to change { pipeline.reload.status }
- .from('created')
- .to('skipped')
- end
- end
-
- context 'when updating status to pending' do
- before do
- create(:ci_build, pipeline: pipeline, status: :running)
- end
-
- it 'updates pipeline status to running' do
- expect { pipeline.update_legacy_status }
- .to change { pipeline.reload.status }
- .from('created')
- .to('running')
- end
- end
-
- context 'when updating status to scheduled' do
- before do
- create(:ci_build, pipeline: pipeline, status: :scheduled)
- end
-
- it 'updates pipeline status to scheduled' do
- expect { pipeline.update_legacy_status }
- .to change { pipeline.reload.status }
- .from('created')
- .to('scheduled')
- end
- end
-
- context 'when statuses status was not recognized' do
- before do
- allow(pipeline)
- .to receive(:latest_builds_status)
- .and_return(:unknown)
- end
-
- it 'raises an exception' do
- expect { pipeline.update_legacy_status }
- .to raise_error(Ci::HasStatus::UnknownStatusError)
- end
- end
- end
-
describe '#detailed_status' do
subject { pipeline.detailed_status(user) }
@@ -2918,25 +2813,9 @@ RSpec.describe Ci::Pipeline, :mailer do
describe '#ensure_ci_ref!' do
subject { pipeline.ensure_ci_ref! }
- shared_examples_for 'protected by feature flag' do
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ci_pipeline_fixed_notifications: false)
- end
-
- it 'does not do anything' do
- expect(Ci::Ref).not_to receive(:ensure_for)
-
- subject
- end
- end
- end
-
context 'when ci_ref does not exist yet' do
let!(:pipeline) { create(:ci_pipeline, ci_ref_presence: false) }
- it_behaves_like 'protected by feature flag'
-
it 'creates a new ci_ref and assigns it' do
expect { subject }.to change { Ci::Ref.count }.by(1)
@@ -2947,8 +2826,6 @@ RSpec.describe Ci::Pipeline, :mailer do
context 'when ci_ref already exists' do
let!(:pipeline) { create(:ci_pipeline) }
- it_behaves_like 'protected by feature flag'
-
it 'fetches a new ci_ref and assigns it' do
expect { subject }.not_to change { Ci::Ref.count }
@@ -3082,24 +2959,14 @@ RSpec.describe Ci::Pipeline, :mailer do
create(:ci_build, :success, :report_results, name: 'java', pipeline: pipeline, project: project)
end
- it 'returns test report summary with collected data', :aggregate_failures do
- expect(subject.total_time).to be(0.84)
- expect(subject.total_count).to be(4)
- expect(subject.success_count).to be(0)
- expect(subject.failed_count).to be(0)
- expect(subject.error_count).to be(4)
- expect(subject.skipped_count).to be(0)
+ it 'returns test report summary with collected data' do
+ expect(subject.total).to include(time: 0.84, count: 4, success: 0, failed: 0, skipped: 0, error: 4)
end
end
context 'when pipeline does not have any builds with report results' do
- it 'returns empty test report sumary', :aggregate_failures do
- expect(subject.total_time).to be(0)
- expect(subject.total_count).to be(0)
- expect(subject.success_count).to be(0)
- expect(subject.failed_count).to be(0)
- expect(subject.error_count).to be(0)
- expect(subject.skipped_count).to be(0)
+ it 'returns empty test report summary' do
+ expect(subject.total).to include(time: 0, count: 0, success: 0, failed: 0, skipped: 0, error: 0)
end
end
end
@@ -3141,40 +3008,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
end
- describe '#test_reports_count', :use_clean_rails_memory_store_caching do
- subject { pipeline.test_reports }
-
- context 'when pipeline has multiple builds with test reports' do
- let!(:build_rspec) { create(:ci_build, :success, name: 'rspec', pipeline: pipeline, project: project) }
- let!(:build_java) { create(:ci_build, :success, name: 'java', pipeline: pipeline, project: project) }
-
- before do
- create(:ci_job_artifact, :junit, job: build_rspec, project: project)
- create(:ci_job_artifact, :junit_with_ant, job: build_java, project: project)
- end
-
- it 'returns test report count equal to test reports total_count' do
- expect(subject.total_count).to eq(7)
- expect(subject.total_count).to eq(pipeline.test_reports_count)
- end
-
- it 'reads from cache when records are cached' do
- expect(Rails.cache.fetch(['project', project.id, 'pipeline', pipeline.id, 'test_reports_count'], force: false)).to be_nil
-
- pipeline.test_reports_count
-
- expect(ActiveRecord::QueryRecorder.new { pipeline.test_reports_count }.count).to eq(0)
- end
- end
-
- context 'when pipeline does not have any builds with test reports' do
- it 'returns empty test report count' do
- expect(subject.total_count).to eq(0)
- expect(subject.total_count).to eq(pipeline.test_reports_count)
- end
- end
- end
-
describe '#accessibility_reports' do
subject { pipeline.accessibility_reports }
@@ -3282,32 +3115,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
end
end
-
- context 'when transitioning to success' do
- context 'when feature is enabled' do
- before do
- stub_feature_flags(keep_latest_artifacts_for_ref: true)
- end
-
- it 'calls the PipelineSuccessUnlockArtifactsWorker' do
- expect(Ci::PipelineSuccessUnlockArtifactsWorker).to receive(:perform_async).with(pipeline.id)
-
- pipeline.succeed!
- end
- end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(keep_latest_artifacts_for_ref: false)
- end
-
- it 'does not call the PipelineSuccessUnlockArtifactsWorker' do
- expect(Ci::PipelineSuccessUnlockArtifactsWorker).not_to receive(:perform_async)
-
- pipeline.succeed!
- end
- end
- end
end
describe '#default_branch?' do
diff --git a/spec/models/ci/ref_spec.rb b/spec/models/ci/ref_spec.rb
index fd4742a8ad2..8bce3c10d8c 100644
--- a/spec/models/ci/ref_spec.rb
+++ b/spec/models/ci/ref_spec.rb
@@ -3,8 +3,69 @@
require 'spec_helper'
RSpec.describe Ci::Ref do
+ using RSpec::Parameterized::TableSyntax
+
it { is_expected.to belong_to(:project) }
+ describe 'state machine transitions' do
+ context 'unlock artifacts transition' do
+ let(:ci_ref) { create(:ci_ref) }
+ let(:unlock_artifacts_worker_spy) { class_spy(::Ci::PipelineSuccessUnlockArtifactsWorker) }
+
+ before do
+ stub_const('Ci::PipelineSuccessUnlockArtifactsWorker', unlock_artifacts_worker_spy)
+ end
+
+ context 'when keep latest artifact feature is enabled' do
+ before do
+ stub_feature_flags(keep_latest_artifacts_for_ref: true)
+ end
+
+ where(:initial_state, :action, :count) do
+ :unknown | :succeed! | 1
+ :unknown | :do_fail! | 0
+ :success | :succeed! | 1
+ :success | :do_fail! | 0
+ :failed | :succeed! | 1
+ :failed | :do_fail! | 0
+ :fixed | :succeed! | 1
+ :fixed | :do_fail! | 0
+ :broken | :succeed! | 1
+ :broken | :do_fail! | 0
+ :still_failing | :succeed | 1
+ :still_failing | :do_fail | 0
+ end
+
+ with_them do
+ context "when transitioning states" do
+ before do
+ status_value = Ci::Ref.state_machines[:status].states[initial_state].value
+ ci_ref.update!(status: status_value)
+ end
+
+ it 'calls unlock artifacts service' do
+ ci_ref.send(action)
+
+ expect(unlock_artifacts_worker_spy).to have_received(:perform_async).exactly(count).times
+ end
+ end
+ end
+ end
+
+ context 'when keep latest artifact feature is not enabled' do
+ before do
+ stub_feature_flags(keep_latest_artifacts_for_ref: false)
+ end
+
+ it 'does not call unlock artifacts service' do
+ ci_ref.succeed!
+
+ expect(unlock_artifacts_worker_spy).not_to have_received(:perform_async)
+ end
+ end
+ end
+ end
+
describe '.ensure_for' do
let_it_be(:project) { create(:project, :repository) }
@@ -161,16 +222,6 @@ RSpec.describe Ci::Ref do
it_behaves_like 'no-op'
end
- context 'when feature flag is disabled' do
- let(:pipeline) { create(:ci_pipeline, :success, ci_ref: ci_ref) }
-
- before do
- stub_feature_flags(ci_pipeline_fixed_notifications: false)
- end
-
- it_behaves_like 'no-op'
- end
-
context 'when pipeline is not the latest pipeline' do
let!(:pipeline) { create(:ci_pipeline, :success, ci_ref: ci_ref) }
let!(:latest_pipeline) { create(:ci_pipeline, :success, ci_ref: ci_ref) }
diff --git a/spec/models/clusters/agent_spec.rb b/spec/models/clusters/agent_spec.rb
new file mode 100644
index 00000000000..bb1fc021e66
--- /dev/null
+++ b/spec/models/clusters/agent_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Agent do
+ subject { create(:cluster_agent) }
+
+ it { is_expected.to belong_to(:project).class_name('::Project') }
+ it { is_expected.to have_many(:agent_tokens).class_name('Clusters::AgentToken') }
+
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(63) }
+ it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
+
+ describe 'validation' do
+ describe 'name validation' do
+ it 'rejects names that do not conform to RFC 1123', :aggregate_failures do
+ %w[Agent agentA agentAagain gent- -agent agent.a agent/a agent>a].each do |name|
+ agent = build(:cluster_agent, name: name)
+
+ expect(agent).not_to be_valid
+ expect(agent.errors[:name]).to eq(["can contain only lowercase letters, digits, and '-', but cannot start or end with '-'"])
+ end
+ end
+
+ it 'accepts valid names', :aggregate_failures do
+ %w[agent agent123 agent-123].each do |name|
+ agent = build(:cluster_agent, name: name)
+
+ expect(agent).to be_valid
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb
new file mode 100644
index 00000000000..ad9dd11b24e
--- /dev/null
+++ b/spec/models/clusters/agent_token_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::AgentToken do
+ it { is_expected.to belong_to(:agent).class_name('Clusters::Agent') }
+
+ describe '#token' do
+ it 'is generated on save' do
+ agent_token = build(:cluster_agent_token, token_encrypted: nil)
+ expect(agent_token.token).to be_nil
+
+ agent_token.save!
+
+ expect(agent_token.token).to be_present
+ end
+ end
+end
diff --git a/spec/models/clusters/applications/ingress_spec.rb b/spec/models/clusters/applications/ingress_spec.rb
index d1138f5fa2d..e029283326f 100644
--- a/spec/models/clusters/applications/ingress_spec.rb
+++ b/spec/models/clusters/applications/ingress_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe Clusters::Applications::Ingress do
it 'is initialized with ingress arguments' do
expect(subject.name).to eq('ingress')
expect(subject.chart).to eq('stable/nginx-ingress')
- expect(subject.version).to eq('1.29.7')
+ expect(subject.version).to eq('1.40.2')
expect(subject).to be_rbac
expect(subject.files).to eq(ingress.files)
end
@@ -153,7 +153,7 @@ RSpec.describe Clusters::Applications::Ingress do
let(:ingress) { create(:clusters_applications_ingress, :errored, version: 'nginx') }
it 'is initialized with the locked version' do
- expect(subject.version).to eq('1.29.7')
+ expect(subject.version).to eq('1.40.2')
end
end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 4807957152c..2d0b5af0e77 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -1153,6 +1153,57 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
+ describe '#connection_error' do
+ let(:cluster) { create(:cluster) }
+ let(:error) { :unknown_error }
+
+ subject { cluster.connection_error }
+
+ it { is_expected.to be_nil }
+
+ context 'with a cached status' do
+ before do
+ stub_reactive_cache(cluster, connection_error: error)
+ end
+
+ it { is_expected.to eq(error) }
+ end
+ end
+
+ describe '#node_connection_error' do
+ let(:cluster) { create(:cluster) }
+ let(:error) { :unknown_error }
+
+ subject { cluster.node_connection_error }
+
+ it { is_expected.to be_nil }
+
+ context 'with a cached status' do
+ before do
+ stub_reactive_cache(cluster, node_connection_error: error)
+ end
+
+ it { is_expected.to eq(error) }
+ end
+ end
+
+ describe '#metrics_connection_error' do
+ let(:cluster) { create(:cluster) }
+ let(:error) { :unknown_error }
+
+ subject { cluster.metrics_connection_error }
+
+ it { is_expected.to be_nil }
+
+ context 'with a cached status' do
+ before do
+ stub_reactive_cache(cluster, metrics_connection_error: error)
+ end
+
+ it { is_expected.to eq(error) }
+ end
+ end
+
describe '#nodes' do
let(:cluster) { create(:cluster) }
@@ -1186,43 +1237,49 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'cluster is enabled' do
let(:cluster) { create(:cluster, :provided_by_user, :group) }
let(:gl_k8s_node_double) { double(Gitlab::Kubernetes::Node) }
- let(:expected_nodes) { nil }
+ let(:expected_nodes) { {} }
before do
stub_kubeclient_discover(cluster.platform.api_url)
allow(Gitlab::Kubernetes::Node).to receive(:new).with(cluster).and_return(gl_k8s_node_double)
- allow(gl_k8s_node_double).to receive(:all).and_return([])
+ allow(gl_k8s_node_double).to receive(:all).and_return(expected_nodes)
end
context 'connection to the cluster is successful' do
+ let(:expected_nodes) { { nodes: [kube_node.merge(kube_node_metrics)] } }
+ let(:connection_status) { { connection_status: :connected } }
+
before do
allow(gl_k8s_node_double).to receive(:all).and_return(expected_nodes)
end
- let(:expected_nodes) { [kube_node.merge(kube_node_metrics)] }
-
- it { is_expected.to eq(connection_status: :connected, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
end
context 'cluster cannot be reached' do
+ let(:connection_status) { { connection_status: :unreachable, connection_error: :connection_error } }
+
before do
allow(cluster.kubeclient.core_client).to receive(:discover)
.and_raise(SocketError)
end
- it { is_expected.to eq(connection_status: :unreachable, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
end
context 'cluster cannot be authenticated to' do
+ let(:connection_status) { { connection_status: :authentication_failure, connection_error: :authentication_error } }
+
before do
allow(cluster.kubeclient.core_client).to receive(:discover)
.and_raise(OpenSSL::X509::CertificateError.new("Certificate error"))
end
- it { is_expected.to eq(connection_status: :authentication_failure, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
end
describe 'Kubeclient::HttpError' do
+ let(:connection_status) { { connection_status: :authentication_failure, connection_error: :http_error } }
let(:error_code) { 403 }
let(:error_message) { "Forbidden" }
@@ -1231,28 +1288,32 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
.and_raise(Kubeclient::HttpError.new(error_code, error_message, nil))
end
- it { is_expected.to eq(connection_status: :authentication_failure, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
context 'generic timeout' do
+ let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
let(:error_message) { 'Timed out connecting to server'}
- it { is_expected.to eq(connection_status: :unreachable, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
end
context 'gateway timeout' do
+ let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
let(:error_message) { '504 Gateway Timeout for GET https://kubernetes.example.com/api/v1'}
- it { is_expected.to eq(connection_status: :unreachable, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
end
end
context 'an uncategorised error is raised' do
+ let(:connection_status) { { connection_status: :unknown_failure, connection_error: :unknown_error } }
+
before do
allow(cluster.kubeclient.core_client).to receive(:discover)
.and_raise(StandardError)
end
- it { is_expected.to eq(connection_status: :unknown_failure, nodes: expected_nodes) }
+ it { is_expected.to eq(**connection_status, **expected_nodes) }
it 'notifies Sentry' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
diff --git a/spec/models/clusters/platforms/kubernetes_spec.rb b/spec/models/clusters/platforms/kubernetes_spec.rb
index adccc72d13d..c6a2b67a008 100644
--- a/spec/models/clusters/platforms/kubernetes_spec.rb
+++ b/spec/models/clusters/platforms/kubernetes_spec.rb
@@ -410,6 +410,7 @@ RSpec.describe Clusters::Platforms::Kubernetes do
let(:expected_pod_cached_data) do
kube_pod.tap { |kp| kp['metadata'].delete('namespace') }
end
+
let(:namespace) { "project-namespace" }
let(:environment) { instance_double(Environment, deployment_namespace: namespace) }
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index f4e86f3292b..de9b72c1da2 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -52,27 +52,38 @@ RSpec.describe CommitCollection do
end
describe '#with_latest_pipeline' do
+ let(:another_commit) { project.commit("60ecb67744cb56576c30214ff52294f8ce2def98") }
+
let!(:pipeline) do
- create(
- :ci_empty_pipeline,
- ref: 'master',
- sha: commit.id,
- status: 'success',
- project: project
- )
+ create(:ci_empty_pipeline, ref: 'master', sha: commit.id, status: 'success', project: project)
+ end
+
+ let!(:another_pipeline) do
+ create(:ci_empty_pipeline, ref: 'master', sha: another_commit.id, status: 'success', project: project)
end
- let(:collection) { described_class.new(project, [commit]) }
+
+ let(:collection) { described_class.new(project, [commit, another_commit]) }
it 'sets the latest pipeline for every commit so no additional queries are necessary' do
commits = collection.with_latest_pipeline('master')
recorder = ActiveRecord::QueryRecorder.new do
expect(commits.map { |c| c.latest_pipeline('master') })
- .to eq([pipeline])
+ .to eq([pipeline, another_pipeline])
end
expect(recorder.count).to be_zero
end
+
+ it 'performs a single query to fetch pipeline warnings' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ collection.with_latest_pipeline('master').each do |c|
+ c.latest_pipeline('master').number_of_warnings.itself
+ end
+ end
+
+ expect(recorder.count).to eq(2) # 1 for pipelines, 1 for warnings counts
+ end
end
describe '#with_markdown_cache' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index cd0110a787b..7f893d6a100 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -66,51 +66,35 @@ RSpec.describe CommitStatus do
describe '#processed' do
subject { commit_status.processed }
- context 'when ci_atomic_processing is disabled' do
+ context 'status is latest' do
before do
- stub_feature_flags(ci_atomic_processing: false)
-
- commit_status.save!
+ commit_status.update!(retried: false, status: :pending)
end
- it { is_expected.to be_nil }
+ it { is_expected.to be_falsey }
end
- context 'when ci_atomic_processing is enabled' do
+ context 'status is retried' do
before do
- stub_feature_flags(ci_atomic_processing: true)
- end
-
- context 'status is latest' do
- before do
- commit_status.update!(retried: false, status: :pending)
- end
-
- it { is_expected.to be_falsey }
+ commit_status.update!(retried: true, status: :pending)
end
- context 'status is retried' do
- before do
- commit_status.update!(retried: true, status: :pending)
- end
-
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to be_truthy }
+ end
- it "processed state is always persisted" do
- commit_status.update!(retried: false, status: :pending)
+ it "processed state is always persisted" do
+ commit_status.update!(retried: false, status: :pending)
- # another process does mark object as processed
- CommitStatus.find(commit_status.id).update_column(:processed, true)
+ # another process does mark object as processed
+ CommitStatus.find(commit_status.id).update_column(:processed, true)
- # subsequent status transitions on the same instance
- # always saves processed=false to DB even though
- # the current value did not change
- commit_status.update!(retried: false, status: :running)
+ # subsequent status transitions on the same instance
+ # always saves processed=false to DB even though
+ # the current value did not change
+ commit_status.update!(retried: false, status: :running)
- # we look at a persisted state in DB
- expect(CommitStatus.find(commit_status.id).processed).to eq(false)
- end
+ # we look at a persisted state in DB
+ expect(CommitStatus.find(commit_status.id).processed).to eq(false)
end
end
@@ -438,7 +422,7 @@ RSpec.describe CommitStatus do
end
it 'returns a correct compound status' do
- expect(described_class.all.slow_composite_status(project: project)).to eq 'running'
+ expect(described_class.all.composite_status).to eq 'running'
end
end
@@ -448,7 +432,7 @@ RSpec.describe CommitStatus do
end
it 'returns status that indicates success' do
- expect(described_class.all.slow_composite_status(project: project)).to eq 'success'
+ expect(described_class.all.composite_status).to eq 'success'
end
end
@@ -459,7 +443,7 @@ RSpec.describe CommitStatus do
end
it 'returns status according to the scope' do
- expect(described_class.latest.slow_composite_status(project: project)).to eq 'success'
+ expect(described_class.latest.composite_status).to eq 'success'
end
end
end
diff --git a/spec/models/commit_with_pipeline_spec.rb b/spec/models/commit_with_pipeline_spec.rb
index ff451527929..c4b6deebae0 100644
--- a/spec/models/commit_with_pipeline_spec.rb
+++ b/spec/models/commit_with_pipeline_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe CommitWithPipeline do
sha: commit.sha,
status: 'success')
end
+
let!(:second_pipeline) do
create(:ci_empty_pipeline,
project: project,
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 5f8c65e429e..440943171c3 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
@title, @description, @cached_markdown_version = args[:title], args[:description], args[:cached_markdown_version]
@title_html, @description_html = args[:title_html], args[:description_html]
@author, @project = args[:author], args[:project]
+ @parent_user = args[:parent_user]
end
attr_accessor :title, :description, :cached_markdown_version
@@ -41,8 +42,8 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
let(:cache_version) { Gitlab::MarkdownCache::CACHE_COMMONMARK_VERSION << 16 }
- def thing_subclass(klass, extra_attribute)
- Class.new(klass) { attr_accessor(extra_attribute) }
+ def thing_subclass(klass, *extra_attributes)
+ Class.new(klass) { attr_accessor(*extra_attributes) }
end
shared_examples 'a class with cached markdown fields' do
@@ -192,11 +193,33 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
end
context 'with an author' do
- let(:thing) { thing_subclass(klass, :author).new(title: markdown, title_html: html, author: :author_value) }
+ let(:user) { build(:user) }
+ let(:thing) { thing_subclass(klass, :author).new(title: markdown, title_html: html, author: user) }
it 'sets the author in the context' do
is_expected.to have_key(:author)
- expect(context[:author]).to eq(:author_value)
+ expect(context[:author]).to eq(user)
+ end
+ end
+
+ context 'with a parent_user' do
+ let(:user) { build(:user) }
+ let(:thing) { thing_subclass(klass, :author, :parent_user).new(title: markdown, title_html: html, parent_user: user, author: user) }
+
+ it 'sets the user in the context' do
+ is_expected.to have_key(:user)
+ expect(context[:user]).to eq(user)
+ end
+
+ context 'when the personal_snippet_reference_filters flag is disabled' do
+ before do
+ stub_feature_flags(personal_snippet_reference_filters: false)
+ end
+
+ it 'does not set the user in the context' do
+ is_expected.not_to have_key(:user)
+ expect(context[:user]).to be_nil
+ end
end
end
end
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
new file mode 100644
index 00000000000..13c2ff5efe5
--- /dev/null
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Artifactable do
+ let(:ci_job_artifact) { build(:ci_job_artifact) }
+
+ describe 'artifact properties are included' do
+ context 'when enum is defined' do
+ subject { ci_job_artifact }
+
+ it { is_expected.to define_enum_for(:file_format).with_values(raw: 1, zip: 2, gzip: 3).with_suffix }
+ end
+
+ context 'when const is defined' do
+ subject { ci_job_artifact.class }
+
+ it { is_expected.to be_const_defined(:FILE_FORMAT_ADAPTERS) }
+ end
+ end
+end
diff --git a/spec/models/concerns/ci/has_status_spec.rb b/spec/models/concerns/ci/has_status_spec.rb
index fe46b63781d..b16420bc658 100644
--- a/spec/models/concerns/ci/has_status_spec.rb
+++ b/spec/models/concerns/ci/has_status_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe Ci::HasStatus do
- describe '.slow_composite_status' do
+ describe '.composite_status' do
using RSpec::Parameterized::TableSyntax
- subject { CommitStatus.slow_composite_status(project: nil) }
+ subject { CommitStatus.composite_status }
shared_examples 'build status summary' do
context 'all successful' do
@@ -184,26 +184,16 @@ RSpec.describe Ci::HasStatus do
end
end
- where(:ci_composite_status) do
- [false, true]
- end
-
- with_them do
- before do
- stub_feature_flags(ci_composite_status: ci_composite_status)
- end
+ context 'ci build statuses' do
+ let(:type) { :ci_build }
- context 'ci build statuses' do
- let(:type) { :ci_build }
-
- it_behaves_like 'build status summary'
- end
+ it_behaves_like 'build status summary'
+ end
- context 'generic commit statuses' do
- let(:type) { :generic_commit_status }
+ context 'generic commit statuses' do
+ let(:type) { :generic_commit_status }
- it_behaves_like 'build status summary'
- end
+ it_behaves_like 'build status summary'
end
end
@@ -400,12 +390,4 @@ RSpec.describe Ci::HasStatus do
end
end
end
-
- describe '.legacy_status_sql' do
- subject { Ci::Build.legacy_status_sql }
-
- it 'returns SQL' do
- puts subject
- end
- end
end
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
new file mode 100644
index 00000000000..f23865a5dbb
--- /dev/null
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_state do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:project_statistics) { create(:project_statistics) }
+ let(:model) { CounterAttributeModel.find(project_statistics.id) }
+
+ it_behaves_like CounterAttribute, [:build_artifacts_size, :commit_count] do
+ let(:model) { CounterAttributeModel.find(project_statistics.id) }
+ end
+
+ describe '.steal_increments' do
+ let(:increment_key) { 'counters:Model:123:attribute' }
+ let(:flushed_key) { 'counter:Model:123:attribute:flushed' }
+
+ subject { model.send(:steal_increments, increment_key, flushed_key) }
+
+ where(:increment, :flushed, :result, :flushed_key_present) do
+ nil | nil | 0 | false
+ nil | 0 | 0 | false
+ 0 | 0 | 0 | false
+ 1 | 0 | 1 | true
+ 1 | nil | 1 | true
+ 1 | 1 | 2 | true
+ 1 | -2 | -1 | true
+ -1 | 1 | 0 | false
+ end
+
+ with_them do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(increment_key, increment) if increment
+ redis.set(flushed_key, flushed) if flushed
+ end
+ end
+
+ it { is_expected.to eq(result) }
+
+ it 'drops the increment key and creates the flushed key if it does not exist' do
+ subject
+
+ Gitlab::Redis::SharedState.with do |redis|
+ expect(redis.exists(increment_key)).to be_falsey
+ expect(redis.exists(flushed_key)).to eq(flushed_key_present)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/featurable_spec.rb b/spec/models/concerns/featurable_spec.rb
index cc01820cc97..31186b5fc77 100644
--- a/spec/models/concerns/featurable_spec.rb
+++ b/spec/models/concerns/featurable_spec.rb
@@ -42,6 +42,7 @@ RSpec.describe Featurable do
end
end
end
+
let!(:instance) { klass.new }
it { expect(klass.available_features).to eq [:feature1, :feature2] }
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 96d3e2b7b1b..0824b5c7834 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Issuable do
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:todos).dependent(:destroy) }
it { is_expected.to have_many(:labels) }
+ it { is_expected.to have_many(:note_authors).through(:notes) }
context 'Notes' do
let!(:note) { create(:note, noteable: issue, project: issue.project) }
@@ -149,6 +150,7 @@ RSpec.describe Issuable do
let!(:searchable_issue) do
create(:issue, title: "Searchable awesome issue", description: 'Many cute kittens')
end
+
let!(:searchable_issue2) { create(:issue, title: "Aw", description: "Cu") }
it 'returns issues with a matching title' do
diff --git a/spec/models/concerns/manual_inverse_association_spec.rb b/spec/models/concerns/manual_inverse_association_spec.rb
index 1349d2cc680..0d56d06c624 100644
--- a/spec/models/concerns/manual_inverse_association_spec.rb
+++ b/spec/models/concerns/manual_inverse_association_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe ManualInverseAssociation do
stub_const("#{described_class}::Model", model)
end
- let(:instance) { create(:merge_request).becomes(model) }
+ let(:instance) { create(:merge_request).becomes(model) } # rubocop: disable Cop/AvoidBecomes
describe '.manual_inverse_association' do
context 'when the relation exists' do
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index 15352a1453c..3dd6f1450c7 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe Milestoneable do
end
describe 'release scopes' do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, :repository) }
let_it_be(:release_1) { create(:release, tag: 'v1.0', project: project) }
let_it_be(:release_2) { create(:release, tag: 'v2.0', project: project) }
@@ -126,6 +126,22 @@ RSpec.describe Milestoneable do
let_it_be(:items) { Issue.all }
+ describe '#any_milestone' do
+ context 'when milestone filter is present and related closing issues are joined' do
+ let_it_be(:merge_request_1) { create(:merge_request, source_project: project, source_branch: 'feature-1') }
+ let_it_be(:merge_request_2) { create(:merge_request, source_project: project, source_branch: 'feature-2') }
+
+ let_it_be(:mrc_issue_1) { create(:merge_requests_closing_issues, issue: issue_1, merge_request: merge_request_1) }
+ let_it_be(:mrc_issue_2) { create(:merge_requests_closing_issues, issue: issue_2, merge_request: merge_request_2) }
+
+ it 'returns merge request closing issues of any milestone' do
+ relation = items.joins(merge_requests_closing_issues: :issue).any_milestone
+
+ expect(relation).to contain_exactly(issue_1, issue_2)
+ end
+ end
+ end
+
describe '#without_release' do
it 'returns the issues not tied to any milestone and the ones tied to milestone with no release' do
expect(items.without_release).to contain_exactly(issue_5, issue_6)
diff --git a/spec/models/concerns/sha_attribute_spec.rb b/spec/models/concerns/sha_attribute_spec.rb
index 50748efcda4..3846dd9c231 100644
--- a/spec/models/concerns/sha_attribute_spec.rb
+++ b/spec/models/concerns/sha_attribute_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe ShaAttribute do
end
describe '#sha_attribute' do
- context 'when in non-production' do
+ context 'when in development' do
before do
stub_rails_env('development')
end
@@ -38,24 +38,22 @@ RSpec.describe ShaAttribute do
end
context 'when the table does not exist' do
- it 'allows the attribute to be added and issues a warning' do
+ it 'allows the attribute to be added' do
allow(model).to receive(:table_exists?).and_return(false)
expect(model).not_to receive(:columns)
expect(model).to receive(:attribute)
- expect(model).to receive(:warn)
model.sha_attribute(:name)
end
end
context 'when the column does not exist' do
- it 'allows the attribute to be added and issues a warning' do
+ it 'allows the attribute to be added' do
allow(model).to receive(:table_exists?).and_return(true)
expect(model).to receive(:columns)
expect(model).to receive(:attribute)
- expect(model).to receive(:warn)
model.sha_attribute(:no_name)
end
diff --git a/spec/models/custom_emoji_spec.rb b/spec/models/custom_emoji_spec.rb
index 2b569b6097d..836c4139107 100644
--- a/spec/models/custom_emoji_spec.rb
+++ b/spec/models/custom_emoji_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe CustomEmoji do
it { is_expected.to have_db_column(:file) }
it { is_expected.to validate_length_of(:name).is_at_most(36) }
it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to have_db_column(:external) }
end
describe 'exclusion of duplicated emoji' do
diff --git a/spec/models/design_management/design_at_version_spec.rb b/spec/models/design_management/design_at_version_spec.rb
index 2c640ee5c2c..3c1ff45c53f 100644
--- a/spec/models/design_management/design_at_version_spec.rb
+++ b/spec/models/design_management/design_at_version_spec.rb
@@ -78,18 +78,23 @@ RSpec.describe DesignManagement::DesignAtVersion do
let!(:version_a) do
create(:design_version, designs: [design_a])
end
+
let!(:version_b) do
create(:design_version, designs: [design_b])
end
+
let!(:version_mod) do
create(:design_version, modified_designs: [design_a, design_b])
end
+
let!(:version_c) do
create(:design_version, deleted_designs: [design_a])
end
+
let!(:version_d) do
create(:design_version, deleted_designs: [design_b])
end
+
let!(:version_e) do
create(:design_version, designs: [design_a])
end
@@ -296,9 +301,11 @@ RSpec.describe DesignManagement::DesignAtVersion do
let!(:version_a) do
create(:design_version, designs: create_list(:design, 3, issue: issue))
end
+
let!(:version_b) do
create(:design_version, designs: create_list(:design, 1, issue: issue))
end
+
let!(:version_c) do
create(:design_version, designs: create_list(:design, 1, issue: issue_b))
end
@@ -346,10 +353,12 @@ RSpec.describe DesignManagement::DesignAtVersion do
let!(:version_a) do
create(:design_version, designs: create_list(:design, 3, issue: issue))
end
+
let!(:version_b) do
create(:design_version, designs: create_list(:design, 2, issue: issue))
end
# 1 version, with 3 designs on issue B, so 1*3 = 3
+
let!(:version_c) do
create(:design_version, designs: create_list(:design, 3, issue: issue_b))
end
diff --git a/spec/models/design_management/design_collection_spec.rb b/spec/models/design_management/design_collection_spec.rb
index c5e290da759..de766d5ce09 100644
--- a/spec/models/design_management/design_collection_spec.rb
+++ b/spec/models/design_management/design_collection_spec.rb
@@ -34,6 +34,15 @@ RSpec.describe DesignManagement::DesignCollection do
collection.find_or_create_design!(filename: 'world.jpg')
end.not_to exceed_query_limit(1)
end
+
+ it 'inserts the design after any existing designs' do
+ design1 = collection.find_or_create_design!(filename: 'design1.jpg')
+ design1.update!(relative_position: 100)
+
+ design2 = collection.find_or_create_design!(filename: 'design2.jpg')
+
+ expect(collection.designs.ordered(issue.project)).to eq([design1, design2])
+ end
end
describe "#versions" do
diff --git a/spec/models/design_management/design_spec.rb b/spec/models/design_management/design_spec.rb
index 345147390c0..2c129f883b9 100644
--- a/spec/models/design_management/design_spec.rb
+++ b/spec/models/design_management/design_spec.rb
@@ -11,6 +11,11 @@ RSpec.describe DesignManagement::Design do
let_it_be(:design3) { create(:design, :with_versions, issue: issue, versions_count: 1) }
let_it_be(:deleted_design) { create(:design, :with_versions, deleted: true) }
+ it_behaves_like 'a class that supports relative positioning' do
+ let(:factory) { :design }
+ let(:default_params) { { issue: issue } }
+ end
+
describe 'relations' do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:issue) }
@@ -21,7 +26,7 @@ RSpec.describe DesignManagement::Design do
end
describe 'validations' do
- subject(:design) { build(:design) }
+ subject(:design) { build(:design, issue: issue) }
it { is_expected.to be_valid }
it { is_expected.to validate_presence_of(:project) }
@@ -147,6 +152,45 @@ RSpec.describe DesignManagement::Design do
end
end
+ describe '.ordered' do
+ before_all do
+ design1.update!(relative_position: 2)
+ design2.update!(relative_position: 1)
+ design3.update!(relative_position: nil)
+ deleted_design.update!(relative_position: nil)
+ end
+
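+ # design3 and deleted_design have no relative_position, so they sort after the positioned designs in ID order.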
+ it 'sorts by relative position and ID in ascending order' do
+ expect(described_class.ordered(issue.project)).to eq([design2, design1, design3, deleted_design])
+ end
+
+ context 'when the :reorder_designs feature is enabled for the project' do
+ before do
+ stub_feature_flags(reorder_designs: issue.project)
+ end
+
+ it 'sorts by relative position and ID in ascending order' do
+ expect(described_class.ordered(issue.project)).to eq([design2, design1, design3, deleted_design])
+ end
+ end
+
+ context 'when the :reorder_designs feature is disabled' do
+ before do
+ stub_feature_flags(reorder_designs: false)
+ end
+
+ it 'sorts by ID in ascending order' do
+ expect(described_class.ordered(issue.project)).to eq([design1, design2, design3, deleted_design])
+ end
+ end
+ end
+
+ describe '.in_creation_order' do
+ it 'sorts by ID in ascending order' do
+ expect(described_class.in_creation_order).to eq([design1, design2, design3, deleted_design])
+ end
+ end
+
describe '.with_filename' do
it 'returns correct design when passed a single filename' do
expect(described_class.with_filename(design1.filename)).to eq([design1])
@@ -181,7 +225,7 @@ RSpec.describe DesignManagement::Design do
end
describe '#visible_in?' do
- let_it_be(:issue) { create(:issue) }
+ let_it_be(:issue) { create(:issue, project: issue.project) }
# It is expensive to re-create complex histories, so we do it once, and then
# assert that we can establish visibility at any given version.
@@ -237,7 +281,7 @@ RSpec.describe DesignManagement::Design do
describe '#status' do
context 'the design is new' do
- subject { build(:design) }
+ subject { build(:design, issue: issue) }
it { is_expected.to have_attributes(status: :new) }
end
@@ -257,7 +301,7 @@ RSpec.describe DesignManagement::Design do
describe '#deleted?' do
context 'the design is new' do
- let(:design) { build(:design) }
+ let(:design) { build(:design, issue: issue) }
it 'is falsy' do
expect(design).not_to be_deleted
@@ -281,7 +325,7 @@ RSpec.describe DesignManagement::Design do
end
context 'the design has been deleted, but was then re-created' do
- let(:design) { create(:design, :with_versions, versions_count: 1, deleted: true) }
+ let(:design) { create(:design, :with_versions, issue: issue, versions_count: 1, deleted: true) }
it 'is falsy' do
restore_designs(design)
@@ -299,7 +343,7 @@ RSpec.describe DesignManagement::Design do
end
it "is true when there are no versions" do
- expect(build(:design)).to be_new_design
+ expect(build(:design, issue: issue)).to be_new_design
end
it 'is false for deleted designs' do
@@ -336,7 +380,7 @@ RSpec.describe DesignManagement::Design do
describe "#full_path" do
it "builds the full path for a design" do
- design = build(:design, filename: "hello.jpg")
+ design = build(:design, issue: issue, filename: "hello.jpg")
expected_path = "#{DesignManagement.designs_directory}/issue-#{design.issue.iid}/hello.jpg"
expect(design.full_path).to eq(expected_path)
@@ -359,15 +403,13 @@ RSpec.describe DesignManagement::Design do
let(:versions_count) { 1 }
it 'builds diff refs based on the empty tree if there was only one version' do
- design = create(:design, :with_file, versions_count: 1)
-
expect(design.diff_refs.base_sha).to eq(Gitlab::Git::BLANK_SHA)
expect(design.diff_refs.head_sha).to eq(design.diff_refs.head_sha)
end
end
it 'has no diff ref if new' do
- design = build(:design)
+ design = build(:design, issue: issue)
expect(design.diff_refs).to be_nil
end
@@ -375,7 +417,7 @@ RSpec.describe DesignManagement::Design do
describe '#repository' do
it 'is a design repository' do
- design = build(:design)
+ design = build(:design, issue: issue)
expect(design.repository).to be_a(DesignManagement::Repository)
end
@@ -383,7 +425,7 @@ RSpec.describe DesignManagement::Design do
describe '#note_etag_key' do
it 'returns a correct etag key' do
- design = create(:design)
+ design = design1
expect(design.note_etag_key).to eq(
::Gitlab::Routing.url_helpers.designs_project_issue_path(design.project, design.issue, { vueroute: design.filename })
@@ -392,47 +434,26 @@ RSpec.describe DesignManagement::Design do
end
describe '#user_notes_count', :use_clean_rails_memory_store_caching do
- let_it_be(:design) { create(:design, :with_file) }
-
- subject { design.user_notes_count }
-
# Note: Cache invalidation tests are in `design_user_notes_count_service_spec.rb`
-
it 'returns a count of user-generated notes' do
- create(:diff_note_on_design, noteable: design)
-
- is_expected.to eq(1)
- end
-
- it 'does not count notes on other designs' do
- second_design = create(:design, :with_file)
- create(:diff_note_on_design, noteable: second_design)
+ common_attrs = { issue: issue, project: issue.project, author: issue.project.creator }
+ design, second_design = create_list(:design, 2, :with_file, issue: issue)
+ create(:diff_note_on_design, **common_attrs, noteable: design)
+ create(:diff_note_on_design, **common_attrs, system: true, noteable: design)
+ create(:diff_note_on_design, **common_attrs, noteable: second_design)
- is_expected.to eq(0)
- end
-
- it 'does not count system notes' do
- create(:diff_note_on_design, system: true, noteable: design)
-
- is_expected.to eq(0)
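+ # Only the non-system note attached to `design` is counted; the system note and the note on `second_design` are excluded.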
+ expect(design.user_notes_count).to eq(1)
end
end
describe '#after_note_changed' do
- subject { build(:design) }
+ it 'calls #delete_cache on DesignUserNotesCountService for non-system notes' do
+ design = design1
- it 'calls #delete_cache on DesignUserNotesCountService' do
- expect_next_instance_of(DesignManagement::DesignUserNotesCountService) do |service|
- expect(service).to receive(:delete_cache)
- end
+ expect(design.send(:user_notes_count_service)).to receive(:delete_cache).once
- subject.after_note_changed(build(:note))
- end
-
- it 'does not call #delete_cache on DesignUserNotesCountService when passed a system note' do
- expect(DesignManagement::DesignUserNotesCountService).not_to receive(:new)
-
- subject.after_note_changed(build(:note, :system))
+ design.after_note_changed(build(:note, project: issue.project))
+ design.after_note_changed(build(:note, :system, project: issue.project))
end
end
@@ -516,14 +537,14 @@ RSpec.describe DesignManagement::Design do
with_them do
let(:filename) { "my-file.#{ext}" }
- let(:design) { build(:design, filename: filename) }
+ let(:design) { build(:design, issue: issue, filename: filename) }
let(:url) { url_for_design(design) }
let(:captures) { described_class.link_reference_pattern.match(url)&.named_captures }
it 'matches the URL' do
expect(captures).to include(
'url_filename' => filename,
- 'issue' => design.issue.iid.to_s,
+ 'issue' => issue.iid.to_s,
'namespace' => design.project.namespace.to_param,
'project' => design.project.name
)
@@ -565,4 +586,25 @@ RSpec.describe DesignManagement::Design do
end
end
end
+
+ describe '#immediately_before' do
+ let_it_be(:design) { create(:design, issue: issue, relative_position: 100) }
+ let_it_be(:next_design) { create(:design, issue: issue, relative_position: 200) }
+
+ it 'is true when there is no element positioned between this item and the next' do
+ expect(design.immediately_before?(next_design)).to be true
+ end
+
+ it 'is false when there is an element positioned between this item and the next' do
+ create(:design, issue: issue, relative_position: 150)
+
+ expect(design.immediately_before?(next_design)).to be false
+ end
+
+ it 'is false when the next design is to the left of this design' do
+ further_left = create(:design, issue: issue, relative_position: 50)
+
+ expect(design.immediately_before?(further_left)).to be false
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index c449a3c3c47..2696d144db4 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it { is_expected.to have_many(:deployments) }
it { is_expected.to have_many(:metrics_dashboard_annotations) }
it { is_expected.to have_many(:alert_management_alerts) }
+ it { is_expected.to have_one(:latest_opened_most_severe_alert) }
it { is_expected.to delegate_method(:stop_action).to(:last_deployment) }
it { is_expected.to delegate_method(:manual_actions).to(:last_deployment) }
@@ -1347,4 +1348,27 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
expect(project.environments.count_by_state).to eq({ stopped: 0, available: 0 })
end
end
+
+ describe '#has_opened_alert?' do
+ subject { environment.has_opened_alert? }
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment, reload: true) { create(:environment, project: project) }
+
+ context 'when environment has a triggered alert' do
+ let!(:alert) { create(:alert_management_alert, :triggered, project: project, environment: environment) }
+
+ it { is_expected.to be(true) }
+ end
+
+ context 'when environment has a resolved alert' do
+ let!(:alert) { create(:alert_management_alert, :resolved, project: project, environment: environment) }
+
+ it { is_expected.to be(false) }
+ end
+
+ context 'when environment does not have an alert' do
+ it { is_expected.to be(false) }
+ end
+ end
end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 96baeab6809..015a86cb28b 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -111,6 +111,45 @@ RSpec.describe Event do
expect(found).not_to include(false_positive)
end
end
+
+ describe '.for_fingerprint' do
+ let_it_be(:with_fingerprint) { create(:event, fingerprint: 'aaa') }
+
+ before_all do
+ create(:event)
+ create(:event, fingerprint: 'bbb')
+ end
+
+ it 'returns none if there is no fingerprint' do
+ expect(described_class.for_fingerprint(nil)).to be_empty
+ expect(described_class.for_fingerprint('')).to be_empty
+ end
+
+ it 'returns none if there is no match' do
+ expect(described_class.for_fingerprint('not-found')).to be_empty
+ end
+
+ it 'can find a given event' do
+ expect(described_class.for_fingerprint(with_fingerprint.fingerprint))
+ .to contain_exactly(with_fingerprint)
+ end
+ end
+ end
+
+ describe '#fingerprint' do
+ it 'is unique scoped to target' do
+ issue = create(:issue)
+ mr = create(:merge_request)
+
+ expect { create_list(:event, 2, target: issue, fingerprint: '1234') }
+ .to raise_error(include('fingerprint'))
+
+ expect do
+ create(:event, target: mr, fingerprint: 'abcd')
+ create(:event, target: issue, fingerprint: 'abcd')
+ create(:event, target: issue, fingerprint: 'efgh')
+ end.not_to raise_error
+ end
end
describe "Push event" do
diff --git a/spec/models/experiment_spec.rb b/spec/models/experiment_spec.rb
new file mode 100644
index 00000000000..64cd2da4621
--- /dev/null
+++ b/spec/models/experiment_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Experiment do
+ subject { build(:experiment) }
+
+ describe 'associations' do
+ it { is_expected.to have_many(:experiment_users) }
+ it { is_expected.to have_many(:users) }
+ it { is_expected.to have_many(:control_group_users) }
+ it { is_expected.to have_many(:experimental_group_users) }
+
+ describe 'control_group_users and experimental_group_users' do
+ let(:experiment) { create(:experiment) }
+ let(:control_group_user) { build(:user) }
+ let(:experimental_group_user) { build(:user) }
+
+ before do
+ experiment.control_group_users << control_group_user
+ experiment.experimental_group_users << experimental_group_user
+ end
+
+ describe 'control_group_users' do
+ subject { experiment.control_group_users }
+
+ it { is_expected.to contain_exactly(control_group_user) }
+ end
+
+ describe 'experimental_group_users' do
+ subject { experiment.experimental_group_users }
+
+ it { is_expected.to contain_exactly(experimental_group_user) }
+ end
+ end
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_uniqueness_of(:name) }
+ it { is_expected.to validate_length_of(:name).is_at_most(255) }
+ end
+
+ describe '.add_user' do
+ let(:name) { :experiment_key }
+ let(:user) { build(:user) }
+
+ let!(:experiment) { create(:experiment, name: name) }
+
+ subject { described_class.add_user(name, :control, user) }
+
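+ # The examples below cover both halves of add_user: finding or creating the Experiment record, and recording the user in an ExperimentUser row for the given group.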
+ describe 'creating a new experiment record' do
+ context 'an experiment with the provided name already exists' do
+ it 'does not create a new experiment record' do
+ expect { subject }.not_to change(Experiment, :count)
+ end
+ end
+
+ context 'an experiment with the provided name does not exist yet' do
+ let(:experiment) { nil }
+
+ it 'creates a new experiment record' do
+ expect { subject }.to change(Experiment, :count).by(1)
+ end
+ end
+ end
+
+ describe 'creating a new experiment_user record' do
+ context 'an experiment_user record for this experiment already exists' do
+ before do
+ subject
+ end
+
+ it 'does not create a new experiment_user record' do
+ expect { subject }.not_to change(ExperimentUser, :count)
+ end
+ end
+
+ context 'an experiment_user record for this experiment does not exist yet' do
+ it 'creates a new experiment_user record' do
+ expect { subject }.to change(ExperimentUser, :count).by(1)
+ end
+
+ it 'assigns the correct group_type to the experiment_user' do
+ expect { subject }.to change { experiment.control_group_users.count }.by(1)
+ end
+ end
+ end
+ end
+
+ describe '#add_control_user' do
+ let(:experiment) { create(:experiment) }
+ let(:user) { build(:user) }
+
+ subject { experiment.add_control_user(user) }
+
+ it 'creates a new experiment_user record and assigns the correct group_type' do
+ expect { subject }.to change { experiment.control_group_users.count }.by(1)
+ end
+ end
+
+ describe '#add_experimental_user' do
+ let(:experiment) { create(:experiment) }
+ let(:user) { build(:user) }
+
+ subject { experiment.add_experimental_user(user) }
+
+ it 'creates a new experiment_user record and assigns the correct group_type' do
+ expect { subject }.to change { experiment.experimental_group_users.count }.by(1)
+ end
+ end
+end
diff --git a/spec/models/experiment_user_spec.rb b/spec/models/experiment_user_spec.rb
new file mode 100644
index 00000000000..9201529b145
--- /dev/null
+++ b/spec/models/experiment_user_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ExperimentUser do
+ describe 'Associations' do
+ it { is_expected.to belong_to(:experiment) }
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe 'Validations' do
+ it { is_expected.to validate_presence_of(:group_type) }
+ end
+end
diff --git a/spec/models/group_deploy_key_spec.rb b/spec/models/group_deploy_key_spec.rb
index 3fe71cc4699..6757c5534ce 100644
--- a/spec/models/group_deploy_key_spec.rb
+++ b/spec/models/group_deploy_key_spec.rb
@@ -4,8 +4,82 @@ require 'spec_helper'
RSpec.describe GroupDeployKey do
it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:groups) }
+
+ let_it_be(:group_deploy_key) { create(:group_deploy_key) }
+ let_it_be(:group) { create(:group) }
it 'is of type DeployKey' do
expect(build(:group_deploy_key).type).to eq('DeployKey')
end
+
+ describe '#group_deploy_keys_group_for' do
+ subject { group_deploy_key.group_deploy_keys_group_for(group) }
+
+ context 'when this group deploy key is linked to a given group' do
+ it 'returns the relevant group_deploy_keys_group association' do
+ group_deploy_keys_group = create(:group_deploy_keys_group, group: group, group_deploy_key: group_deploy_key)
+
+ expect(subject).to eq(group_deploy_keys_group)
+ end
+ end
+
+ context 'when this group deploy key is not linked to a given group' do
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#can_be_edited_for' do
+ let_it_be(:user) { create(:user) }
+
+ subject { group_deploy_key.can_be_edited_for?(user, group) }
+
+ context 'when a given user has the :update_group_deploy_key permission for that key' do
+ it 'is true' do
+ allow(Ability).to receive(:allowed?).with(user, :update_group_deploy_key, group_deploy_key).and_return(true)
+
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when a given user does not have the :update_group_deploy_key permission for that key' do
+ before do
+ allow(Ability).to receive(:allowed?).with(user, :update_group_deploy_key, group_deploy_key).and_return(false)
+ end
+
+ it 'is true when this user has the :update_group_deploy_key_for_group permission for this group' do
+ allow(Ability).to receive(:allowed?).with(user, :update_group_deploy_key_for_group, group_deploy_key.group_deploy_keys_group_for(group)).and_return(true)
+
+ expect(subject).to be_truthy
+ end
+
+ it 'is false when this user does not have the :update_group_deploy_key_for_group permission for this group' do
+ allow(Ability).to receive(:allowed?).with(user, :update_group_deploy_key_for_group, group_deploy_key.group_deploy_keys_group_for(group)).and_return(false)
+
+ expect(subject).to be_falsey
+ end
+ end
+ end
+
+ describe '#group_deploy_keys_groups_for_user' do
+ let_it_be(:user) { create(:user) }
+
+ context 'when a group has a group deploy key' do
+ let_it_be(:expected_association) { create(:group_deploy_keys_group, group: group, group_deploy_key: group_deploy_key) }
+
+ it 'returns the related group_deploy_keys_group association when the user can read the group' do
+ allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(true)
+
+ expect(group_deploy_key.group_deploy_keys_groups_for_user(user))
+ .to contain_exactly(expected_association)
+ end
+
+ it 'does not return the related group_deploy_keys_group association when the user cannot read the group' do
+ allow(Ability).to receive(:allowed?).with(user, :read_group, group).and_return(false)
+
+ expect(group_deploy_key.group_deploy_keys_groups_for_user(user)).to be_empty
+ end
+ end
+ end
end
diff --git a/spec/models/group_deploy_keys_group_spec.rb b/spec/models/group_deploy_keys_group_spec.rb
new file mode 100644
index 00000000000..31e5627a093
--- /dev/null
+++ b/spec/models/group_deploy_keys_group_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupDeployKeysGroup do
+ describe "Associations" do
+ it { is_expected.to belong_to(:group_deploy_key) }
+ it { is_expected.to belong_to(:group) }
+ end
+
+ describe "Validation" do
+ it { is_expected.to validate_presence_of(:group_id) }
+ it { is_expected.to validate_presence_of(:group_deploy_key) }
+ end
+end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 4184f2d07cc..3eb74da09e1 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Group do
it { is_expected.to have_many(:container_repositories) }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:iterations) }
+ it { is_expected.to have_many(:group_deploy_keys) }
describe '#members & #requesters' do
let(:requester) { create(:user) }
@@ -1540,4 +1541,48 @@ RSpec.describe Group do
end
end
end
+
+ describe '#default_owner' do
+ let(:group) { build(:group) }
+
+ context 'the group has owners' do
+ before do
+ group.add_owner(create(:user))
+ group.add_owner(create(:user))
+ end
+
+ it 'is the first owner' do
+ expect(group.default_owner)
+ .to eq(group.owners.first)
+ .and be_a(User)
+ end
+ end
+
+ context 'the group has a parent' do
+ let(:parent) { build(:group) }
+
+ before do
+ group.parent = parent
+ parent.add_owner(create(:user))
+ end
+
+ it 'is the first owner of the parent' do
+ expect(group.default_owner)
+ .to eq(parent.default_owner)
+ .and be_a(User)
+ end
+ end
+
+ context 'we fall back to group.owner' do
+ before do
+ group.owner = build(:user)
+ end
+
+ it 'is the group.owner' do
+ expect(group.default_owner)
+ .to eq(group.owner)
+ .and be_a(User)
+ end
+ end
+ end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 80041d2e859..59634524e74 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -5,10 +5,14 @@ require 'spec_helper'
RSpec.describe Issue do
include ExternalAuthorizationServiceHelpers
+ let_it_be(:user) { create(:user) }
+ let_it_be(:reusable_project) { create(:project) }
+
describe "Associations" do
it { is_expected.to belong_to(:milestone) }
it { is_expected.to belong_to(:iteration) }
it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_one(:namespace).through(:project) }
it { is_expected.to belong_to(:moved_to).class_name('Issue') }
it { is_expected.to have_one(:moved_from).class_name('Issue') }
it { is_expected.to belong_to(:duplicated_to).class_name('Issue') }
@@ -55,6 +59,26 @@ RSpec.describe Issue do
end
end
+ describe 'validations' do
+ subject { issue.valid? }
+
+ describe 'issue_type' do
+ let(:issue) { build(:issue, issue_type: issue_type) }
+
+ context 'when a valid type' do
+ let(:issue_type) { :issue }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'empty type' do
+ let(:issue_type) { nil }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
+
subject { create(:issue) }
describe 'callbacks' do
@@ -105,14 +129,30 @@ RSpec.describe Issue do
end
end
+ describe '.with_issue_type' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:incident) { create(:incident, project: project) }
+
+ it 'gives issues with the given issue type' do
+ expect(described_class.with_issue_type('issue'))
+ .to contain_exactly(issue)
+ end
+
+ it 'gives issues with any of the given issue types' do
+ expect(described_class.with_issue_type(%w(issue incident)))
+ .to contain_exactly(issue, incident)
+ end
+ end
+
describe '#order_by_position_and_priority' do
- let(:project) { create :project }
+ let(:project) { reusable_project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
let(:p2) { create(:label, title: 'P2', project: project, priority: 2) }
let!(:issue1) { create(:labeled_issue, project: project, labels: [p1]) }
let!(:issue2) { create(:labeled_issue, project: project, labels: [p2]) }
- let!(:issue3) { create(:issue, project: project, relative_position: 100) }
- let!(:issue4) { create(:issue, project: project, relative_position: 200) }
+ let!(:issue3) { create(:issue, project: project, relative_position: -200) }
+ let!(:issue4) { create(:issue, project: project, relative_position: -100) }
it 'returns ordered list' do
expect(project.issues.order_by_position_and_priority)
@@ -121,10 +161,10 @@ RSpec.describe Issue do
end
describe '#sort' do
- let(:project) { create(:project) }
+ let(:project) { reusable_project }
context "by relative_position" do
- let!(:issue) { create(:issue, project: project) }
+ let!(:issue) { create(:issue, project: project, relative_position: nil) }
let!(:issue2) { create(:issue, project: project, relative_position: 2) }
let!(:issue3) { create(:issue, project: project, relative_position: 1) }
@@ -166,39 +206,9 @@ RSpec.describe Issue do
expect { issue.close }.to change { issue.state_id }.from(open_state).to(closed_state)
end
-
- context 'when there is an associated Alert Management Alert' do
- context 'when alert can be resolved' do
- let!(:alert) { create(:alert_management_alert, project: issue.project, issue: issue) }
-
- it 'resolves an alert' do
- expect { issue.close }.to change { alert.reload.resolved? }.to(true)
- end
- end
-
- context 'when alert cannot be resolved' do
- let!(:alert) { create(:alert_management_alert, :with_validation_errors, project: issue.project, issue: issue) }
-
- before do
- allow(Gitlab::AppLogger).to receive(:warn).and_call_original
- end
-
- it 'writes a warning into the log' do
- issue.close
-
- expect(Gitlab::AppLogger).to have_received(:warn).with(
- message: 'Cannot resolve an associated Alert Management alert',
- issue_id: issue.id,
- alert_id: alert.id,
- alert_errors: { hosts: ['hosts array is over 255 chars'] }
- )
- end
- end
- end
end
describe '#reopen' do
- let(:user) { create(:user) }
let(:issue) { create(:issue, state: 'closed', closed_at: Time.current, closed_by: user) }
it 'sets closed_at to nil when an issue is reopend' do
@@ -282,7 +292,6 @@ RSpec.describe Issue do
end
describe '#assignee_or_author?' do
- let(:user) { create(:user) }
let(:issue) { create(:issue) }
it 'returns true for a user that is assigned to an issue' do
@@ -303,7 +312,6 @@ RSpec.describe Issue do
end
describe '#can_move?' do
- let(:user) { create(:user) }
let(:issue) { create(:issue) }
subject { issue.can_move?(user) }
@@ -1020,7 +1028,7 @@ RSpec.describe Issue do
context "relative positioning" do
it_behaves_like "a class that supports relative positioning" do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
let(:factory) { :issue }
let(:default_params) { { project: project } }
end
diff --git a/spec/models/iteration_spec.rb b/spec/models/iteration_spec.rb
index ef638330208..5c684fa9771 100644
--- a/spec/models/iteration_spec.rb
+++ b/spec/models/iteration_spec.rb
@@ -8,11 +8,11 @@ RSpec.describe Iteration do
describe "#iid" do
it "is properly scoped on project and group" do
- iteration1 = create(:iteration, project: project)
- iteration2 = create(:iteration, project: project)
+ iteration1 = create(:iteration, :skip_project_validation, project: project)
+ iteration2 = create(:iteration, :skip_project_validation, project: project)
iteration3 = create(:iteration, group: group)
iteration4 = create(:iteration, group: group)
- iteration5 = create(:iteration, project: project)
+ iteration5 = create(:iteration, :skip_project_validation, project: project)
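+ # Columns: stored increment value, stored flushed value, expected return value, and whether the flushed key should exist afterwards.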
want = {
iteration1: 1,
@@ -35,6 +35,15 @@ RSpec.describe Iteration do
context 'Validations' do
subject { build(:iteration, group: group, start_date: start_date, due_date: due_date) }
+ describe '#not_belonging_to_project' do
+ subject { build(:iteration, project: project, start_date: Time.current, due_date: 1.day.from_now) }
+
+ it 'is invalid' do
+ expect(subject).not_to be_valid
+ expect(subject.errors[:project_id]).to include('is not allowed. We do not currently support project-level iterations')
+ end
+ end
+
describe '#dates_do_not_overlap' do
let_it_be(:existing_iteration) { create(:iteration, group: group, start_date: 4.days.from_now, due_date: 1.week.from_now) }
@@ -54,7 +63,10 @@ RSpec.describe Iteration do
end
context 'when dates overlap' do
- context 'same group' do
+ let(:start_date) { 5.days.from_now }
+ let(:due_date) { 6.days.from_now }
+
+ shared_examples_for 'overlapping dates' do
context 'when start_date is in range' do
let(:start_date) { 5.days.from_now }
let(:due_date) { 3.weeks.from_now }
@@ -63,6 +75,11 @@ RSpec.describe Iteration do
expect(subject).not_to be_valid
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
+
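+ # Saving with validations bypassed should still be rejected by the database exclusion constraint named in `constraint_name`.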
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
context 'when end_date is in range' do
@@ -73,25 +90,72 @@ RSpec.describe Iteration do
expect(subject).not_to be_valid
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
+
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
context 'when both overlap' do
- let(:start_date) { 5.days.from_now }
- let(:due_date) { 6.days.from_now }
-
it 'is not valid' do
expect(subject).not_to be_valid
expect(subject.errors[:base]).to include('Dates cannot overlap with other existing Iterations')
end
+
+ it 'is not valid even if forced' do
+ subject.validate # to generate iid/etc
+ expect { subject.save!(validate: false) }.to raise_exception(ActiveRecord::StatementInvalid, /#{constraint_name}/)
+ end
end
end
- context 'different group' do
- let(:start_date) { 5.days.from_now }
- let(:due_date) { 6.days.from_now }
- let(:group) { create(:group) }
+ context 'group' do
+ it_behaves_like 'overlapping dates' do
+ let(:constraint_name) { 'iteration_start_and_due_daterange_group_id_constraint' }
+ end
+
+ context 'different group' do
+ let(:group) { create(:group) }
- it { is_expected.to be_valid }
+ it { is_expected.to be_valid }
+
+ it 'does not trigger exclusion constraints' do
+ expect { subject.save! }.not_to raise_exception
+ end
+ end
+ end
+
+ context 'project' do
+ let_it_be(:existing_iteration) { create(:iteration, :skip_project_validation, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
+
+ subject { build(:iteration, :skip_project_validation, project: project, start_date: start_date, due_date: due_date) }
+
+ it_behaves_like 'overlapping dates' do
+ let(:constraint_name) { 'iteration_start_and_due_daterange_project_id_constraint' }
+ end
+
+ context 'different project' do
+ let(:project) { create(:project) }
+
+ it { is_expected.to be_valid }
+
+ it 'does not trigger exclusion constraints' do
+ expect { subject.save! }.not_to raise_exception
+ end
+ end
+
+ context 'in a group' do
+ let(:group) { create(:group) }
+
+ subject { build(:iteration, group: group, start_date: start_date, due_date: due_date) }
+
+ it { is_expected.to be_valid }
+
+ it 'does not trigger exclusion constraints' do
+ expect { subject.save! }.not_to raise_exception
+ end
+ end
end
end
end
@@ -148,9 +212,9 @@ RSpec.describe Iteration do
context 'time scopes' do
let_it_be(:project) { create(:project, :empty_repo) }
- let_it_be(:iteration_1) { create(:iteration, :skip_future_date_validation, project: project, start_date: 3.days.ago, due_date: 1.day.from_now) }
- let_it_be(:iteration_2) { create(:iteration, :skip_future_date_validation, project: project, start_date: 10.days.ago, due_date: 4.days.ago) }
- let_it_be(:iteration_3) { create(:iteration, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
+ let_it_be(:iteration_1) { create(:iteration, :skip_future_date_validation, :skip_project_validation, project: project, start_date: 3.days.ago, due_date: 1.day.from_now) }
+ let_it_be(:iteration_2) { create(:iteration, :skip_future_date_validation, :skip_project_validation, project: project, start_date: 10.days.ago, due_date: 4.days.ago) }
+ let_it_be(:iteration_3) { create(:iteration, :skip_project_validation, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
describe 'start_date_passed' do
it 'returns iterations where start_date is in the past but due_date is in the future' do
@@ -168,9 +232,9 @@ RSpec.describe Iteration do
describe '.within_timeframe' do
let_it_be(:now) { Time.current }
let_it_be(:project) { create(:project, :empty_repo) }
- let_it_be(:iteration_1) { create(:iteration, project: project, start_date: now, due_date: 1.day.from_now) }
- let_it_be(:iteration_2) { create(:iteration, project: project, start_date: 2.days.from_now, due_date: 3.days.from_now) }
- let_it_be(:iteration_3) { create(:iteration, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
+ let_it_be(:iteration_1) { create(:iteration, :skip_project_validation, project: project, start_date: now, due_date: 1.day.from_now) }
+ let_it_be(:iteration_2) { create(:iteration, :skip_project_validation, project: project, start_date: 2.days.from_now, due_date: 3.days.from_now) }
+ let_it_be(:iteration_3) { create(:iteration, :skip_project_validation, project: project, start_date: 4.days.from_now, due_date: 1.week.from_now) }
it 'returns iterations with start_date and/or end_date between timeframe' do
iterations = described_class.within_timeframe(2.days.from_now, 3.days.from_now)
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index 36d45f17392..a0f633218b0 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -152,14 +152,10 @@ RSpec.describe LfsObject do
end
describe 'file is being stored' do
- let(:lfs_object) { create(:lfs_object, :with_file) }
+ subject { create(:lfs_object, :with_file) }
context 'when existing object has local store' do
- it 'is stored locally' do
- expect(lfs_object.file_store).to be(ObjectStorage::Store::LOCAL)
- expect(lfs_object.file).to be_file_storage
- expect(lfs_object.file.object_store).to eq(ObjectStorage::Store::LOCAL)
- end
+ it_behaves_like 'mounted file in local store'
end
context 'when direct upload is enabled' do
@@ -167,13 +163,7 @@ RSpec.describe LfsObject do
stub_lfs_object_storage(direct_upload: true)
end
- context 'when file is stored' do
- it 'is stored remotely' do
- expect(lfs_object.file_store).to eq(ObjectStorage::Store::REMOTE)
- expect(lfs_object.file).not_to be_file_storage
- expect(lfs_object.file.object_store).to eq(ObjectStorage::Store::REMOTE)
- end
- end
+ it_behaves_like 'mounted file in object store'
end
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index f155c240fb2..a3ed39abfb3 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -113,9 +113,10 @@ RSpec.describe Member do
end
describe 'Scopes & finders' do
- before do
- project = create(:project, :public)
- group = create(:group)
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:group) { create(:group) }
+
+ before_all do
@owner_user = create(:user).tap { |u| group.add_owner(u) }
@owner = group.members.find_by(user_id: @owner_user.id)
@@ -252,9 +253,9 @@ RSpec.describe Member do
describe '.add_user' do
%w[project group].each do |source_type|
context "when source is a #{source_type}" do
- let!(:source) { create(source_type, :public) }
- let!(:user) { create(:user) }
- let!(:admin) { create(:admin) }
+ let_it_be(:source, reload: true) { create(source_type, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
it 'returns a <Source>Member object' do
member = described_class.add_user(source, user, :maintainer)
@@ -322,7 +323,7 @@ RSpec.describe Member do
it 'adds the user as a member' do
expect(source.users).not_to include(user)
- described_class.add_user(source, 42, :maintainer)
+ described_class.add_user(source, non_existing_record_id, :maintainer)
expect(source.users.reload).not_to include(user)
end
@@ -482,10 +483,10 @@ RSpec.describe Member do
describe '.add_users' do
%w[project group].each do |source_type|
context "when source is a #{source_type}" do
- let!(:source) { create(source_type, :public) }
- let!(:admin) { create(:admin) }
- let(:user1) { create(:user) }
- let(:user2) { create(:user) }
+ let_it_be(:source) { create(source_type, :public) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
it 'returns a <Source>Member objects' do
members = described_class.add_users(source, [user1, user2], :maintainer)
diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb
index 4d9e768ecc6..82402b95597 100644
--- a/spec/models/merge_request/metrics_spec.rb
+++ b/spec/models/merge_request/metrics_spec.rb
@@ -8,4 +8,44 @@ RSpec.describe MergeRequest::Metrics do
it { is_expected.to belong_to(:latest_closed_by).class_name('User') }
it { is_expected.to belong_to(:merged_by).class_name('User') }
end
+
+ it 'sets `target_project_id` before save' do
+ merge_request = create(:merge_request)
+ metrics = merge_request.metrics
+
+ metrics.update_column(:target_project_id, nil)
+
+ metrics.save!
+
+ expect(metrics.target_project_id).to eq(merge_request.target_project_id)
+ end
+
+ describe 'scopes' do
+ let_it_be(:metrics_1) { create(:merge_request).metrics.tap { |m| m.update!(merged_at: 10.days.ago) } }
+ let_it_be(:metrics_2) { create(:merge_request).metrics.tap { |m| m.update!(merged_at: 5.days.ago) } }
+
+ describe '.merged_after' do
+ subject { described_class.merged_after(7.days.ago) }
+
+ it 'finds the record' do
+ is_expected.to eq([metrics_2])
+ end
+
+ it "doesn't include record outside of the filter" do
+ is_expected.not_to include([metrics_1])
+ end
+ end
+
+ describe '.merged_before' do
+ subject { described_class.merged_before(7.days.ago) }
+
+ it 'finds the record' do
+ is_expected.to eq([metrics_1])
+ end
+
+ it "doesn't include record outside of the filter" do
+ is_expected.not_to include([metrics_2])
+ end
+ end
+ end
end
diff --git a/spec/models/merge_request_diff_commit_spec.rb b/spec/models/merge_request_diff_commit_spec.rb
index 5ea0145e60f..84fdfae1ed1 100644
--- a/spec/models/merge_request_diff_commit_spec.rb
+++ b/spec/models/merge_request_diff_commit_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe MergeRequestDiffCommit do
project.commit('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
]
end
+
let(:rows) do
[
{
@@ -73,6 +74,7 @@ RSpec.describe MergeRequestDiffCommit do
# This commit's date is "Sun Aug 17 07:12:55 292278994 +0000"
[project.commit('ba3343bc4fa403a8dfbfcab7fc1a8c29ee34bd69')]
end
+
let(:timestamp) { Time.zone.at((1 << 31) - 1) }
let(:rows) do
[{
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index d153ccedf8c..e02c71a1c6f 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -103,6 +103,8 @@ RSpec.describe MergeRequestDiff do
it 'ignores diffs with 0 files' do
MergeRequestDiffFile.where(merge_request_diff_id: [closed_recently.id, merged_recently.id]).delete_all
+ closed_recently.update!(files_count: 0)
+ merged_recently.update!(files_count: 0)
is_expected.to contain_exactly(outdated.id, latest.id, closed.id, merged.id)
end
@@ -672,6 +674,12 @@ RSpec.describe MergeRequestDiff do
end
end
+ describe '#files_count' do
+ it 'returns number of diff files' do
+ expect(diff_with_commits.files_count).to eq(diff_with_commits.merge_request_diff_files.count)
+ end
+ end
+
describe '#first_commit' do
it 'returns first commit' do
expect(diff_with_commits.first_commit.sha).to eq(diff_with_commits.merge_request_diff_commits.last.sha)
@@ -721,10 +729,12 @@ RSpec.describe MergeRequestDiff do
describe '#modified_paths' do
subject do
- diff = create(:merge_request_diff)
- create(:merge_request_diff_file, :new_file, merge_request_diff: diff)
- create(:merge_request_diff_file, :renamed_file, merge_request_diff: diff)
- diff
+ create(:merge_request_diff).tap do |diff|
+ create(:merge_request_diff_file, :new_file, merge_request_diff: diff)
+ create(:merge_request_diff_file, :renamed_file, merge_request_diff: diff)
+
+ diff.merge_request_diff_files.reset
+ end
end
it 'returns affected file paths' do
@@ -735,12 +745,6 @@ RSpec.describe MergeRequestDiff do
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
let(:diff) { merge_request.merge_request_diff }
- # before do
- # # Temporarily unstub diff.modified_paths in favor of original code
- # #
- # allow(diff).to receive(:modified_paths).and_call_original
- # end
-
context "when the merge_request_diff is overflowed" do
before do
expect(diff).to receive(:overflow?).and_return(true)
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 06febddef0c..6edef54b153 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -63,6 +63,7 @@ RSpec.describe MergeRequest do
subject.source_project.repository.path
end
end
+
let(:squash_path) { File.join(repo_path, "gitlab-worktree", "squash-#{subject.id}") }
before do
@@ -247,24 +248,20 @@ RSpec.describe MergeRequest do
describe 'callbacks' do
describe '#ensure_merge_request_metrics' do
- it 'creates metrics after saving' do
- merge_request = create(:merge_request)
+ let(:merge_request) { create(:merge_request) }
+ it 'creates metrics after saving' do
expect(merge_request.metrics).to be_persisted
expect(MergeRequest::Metrics.count).to eq(1)
end
it 'does not duplicate metrics for a merge request' do
- merge_request = create(:merge_request)
-
merge_request.mark_as_merged!
expect(MergeRequest::Metrics.count).to eq(1)
end
it 'does not create duplicated metrics records when MR is concurrently updated' do
- merge_request = create(:merge_request)
-
merge_request.metrics.destroy
instance1 = MergeRequest.find(merge_request.id)
@@ -276,6 +273,27 @@ RSpec.describe MergeRequest do
metrics_records = MergeRequest::Metrics.where(merge_request_id: merge_request.id)
expect(metrics_records.size).to eq(1)
end
+
+ it 'syncs the `target_project_id` to the metrics record' do
+ project = create(:project)
+
+ merge_request.update!(target_project: project, state: :closed)
+
+ expect(merge_request.target_project_id).to eq(project.id)
+ expect(merge_request.target_project_id).to eq(merge_request.metrics.target_project_id)
+ end
+
+ context 'when metrics record already exists with NULL target_project_id' do
+ before do
+ merge_request.metrics.update_column(:target_project_id, nil)
+ end
+
+ it 'returns the metrics record' do
+ metrics_record = merge_request.ensure_metrics
+
+ expect(metrics_record).to be_persisted
+ end
+ end
end
end
@@ -725,6 +743,7 @@ RSpec.describe MergeRequest do
let!(:diff_note) do
create(:diff_note_on_merge_request, project: project, noteable: merge_request)
end
+
let!(:draft_note) do
create(:draft_note_on_text_diff, author: user, merge_request: merge_request)
end
@@ -3696,6 +3715,7 @@ RSpec.describe MergeRequest do
source_branch: 'fixes',
target_project: target_project)
end
+
let(:user) { create(:user) }
before do
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index ad4c8448745..4ef2ddd218a 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -507,6 +507,7 @@ RSpec.describe Namespace do
Gitlab.config.repositories.storages.default.legacy_disk_path
end
end
+
let(:path_in_dir) { File.join(repository_storage_path, namespace.full_path) }
let(:deleted_path) { namespace.full_path.gsub(namespace.path, "#{namespace.full_path}+#{namespace.id}+deleted") }
let(:deleted_path_in_dir) { File.join(repository_storage_path, deleted_path) }
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index e6e6a8c35cf..7edd7849bbe 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -471,6 +471,7 @@ RSpec.describe Note do
note: "added label #{private_label.to_reference(ext_proj)}",
system: true
end
+
let!(:system_note_metadata) { create(:system_note_metadata, note: note, action: :label) }
it_behaves_like "checks references"
@@ -1372,11 +1373,11 @@ RSpec.describe Note do
describe 'banzai_render_context' do
let(:project) { build(:project_empty_repo) }
+ subject(:context) { noteable.banzai_render_context(:title) }
+
context 'when noteable is a merge request' do
let(:noteable) { build :merge_request, target_project: project, source_project: project }
- subject(:context) { noteable.banzai_render_context(:title) }
-
it 'sets the label_url_method in the context' do
expect(context[:label_url_method]).to eq(:project_merge_requests_url)
end
@@ -1385,11 +1386,34 @@ RSpec.describe Note do
context 'when noteable is an issue' do
let(:noteable) { build :issue, project: project }
- subject(:context) { noteable.banzai_render_context(:title) }
-
it 'sets the label_url_method in the context' do
expect(context[:label_url_method]).to eq(:project_issues_url)
end
end
+
+ context 'when noteable is a personal snippet' do
+ let(:noteable) { build(:personal_snippet) }
+
+ it 'sets the parent user in the context' do
+ expect(context[:user]).to eq(noteable.author)
+ end
+ end
+ end
+
+ describe '#parent_user' do
+ it 'returns the author of a personal snippet' do
+ note = build(:note_on_personal_snippet)
+ expect(note.parent_user).to eq(note.noteable.author)
+ end
+
+ it 'returns nil for project snippet' do
+ note = build(:note_on_project_snippet)
+ expect(note.parent_user).to be_nil
+ end
+
+ it 'returns nil when noteable is not a snippet' do
+ note = build(:note_on_issue)
+ expect(note.parent_user).to be_nil
+ end
end
end
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 67738eaec20..0f765d6b09b 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -118,6 +118,46 @@ RSpec.describe NotificationSetting do
expect(subject.event_enabled?(:foo_event)).to be(false)
end
end
+
+ describe 'for failed_pipeline' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:column, :expected) do
+ nil | true
+ true | true
+ false | false
+ end
+
+ with_them do
+ before do
+ subject.update!(failed_pipeline: column)
+ end
+
+ it do
+ expect(subject.event_enabled?(:failed_pipeline)).to eq(expected)
+ end
+ end
+ end
+
+ describe 'for fixed_pipeline' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:column, :expected) do
+ nil | true
+ true | true
+ false | false
+ end
+
+ with_them do
+ before do
+ subject.update!(fixed_pipeline: column)
+ end
+
+ it do
+ expect(subject.event_enabled?(:fixed_pipeline)).to eq(expected)
+ end
+ end
+ end
end
describe '.email_events' do
@@ -138,7 +178,8 @@ RSpec.describe NotificationSetting do
:merge_merge_request,
:failed_pipeline,
:success_pipeline,
- :fixed_pipeline
+ :fixed_pipeline,
+ :moved_project
)
end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 7758ed4a500..7cc8e13d449 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -32,11 +32,17 @@ RSpec.describe Packages::PackageFile, type: :model do
end
end
- it_behaves_like 'UpdateProjectStatistics' do
- subject { build(:package_file, :jar, size: 42) }
+ context 'updating project statistics' do
+ context 'when the package file has an explicit size' do
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:package_file, :jar, size: 42) }
+ end
+ end
- before do
- allow_any_instance_of(Packages::PackageFileUploader).to receive(:size).and_return(42)
+ context 'when the package file does not have a size' do
+ it_behaves_like 'UpdateProjectStatistics' do
+ subject { build(:package_file, size: nil) }
+ end
end
end
@@ -52,7 +58,7 @@ RSpec.describe Packages::PackageFile, type: :model do
end
describe '#update_file_metadata callback' do
- let_it_be(:package_file) { build(:package_file, :nuget, file_store: nil, size: nil) }
+ let_it_be(:package_file) { build(:package_file, :nuget, size: nil) }
subject { package_file.save! }
@@ -61,9 +67,14 @@ RSpec.describe Packages::PackageFile, type: :model do
.to receive(:update_file_metadata)
.and_call_original
- expect { subject }
- .to change { package_file.file_store }.from(nil).to(::Packages::PackageFileUploader::Store::LOCAL)
- .and change { package_file.size }.from(nil).to(3513)
+ # This expectation uses a stub because we can no longer test a change from
+ # `nil` to `1`, because the field is no longer nullable, and it defaults
+ # to `1`.
+ expect(package_file)
+ .to receive(:update_column)
+ .with(:file_store, ::Packages::PackageFileUploader::Store::LOCAL)
+
+ expect { subject }.to change { package_file.size }.from(nil).to(3513)
end
end
end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index d283389e29e..78980f8cdab 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -328,9 +328,11 @@ RSpec.describe PagesDomain do
end
describe '#update_daemon' do
+ let_it_be(:project) { create(:project).tap(&:mark_pages_as_deployed) }
+
context 'when usage is serverless' do
it 'does not call the UpdatePagesConfigurationService' do
- expect(Projects::UpdatePagesConfigurationService).not_to receive(:new)
+ expect(PagesUpdateConfigurationWorker).not_to receive(:perform_async)
create(:pages_domain, usage: :serverless)
end
@@ -352,12 +354,30 @@ RSpec.describe PagesDomain do
domain.destroy!
end
- it 'delegates to Projects::UpdatePagesConfigurationService' do
+ it 'delegates to Projects::UpdatePagesConfigurationService when not running async' do
+ stub_feature_flags(async_update_pages_config: false)
+
service = instance_double('Projects::UpdatePagesConfigurationService')
expect(Projects::UpdatePagesConfigurationService).to receive(:new) { service }
expect(service).to receive(:execute)
- create(:pages_domain)
+ create(:pages_domain, project: project)
+ end
+
+ it "schedules a PagesUpdateConfigurationWorker" do
+ expect(PagesUpdateConfigurationWorker).to receive(:perform_async).with(project.id)
+
+ create(:pages_domain, project: project)
+ end
+
+ context "when the pages aren't deployed" do
+ let_it_be(:project) { create(:project).tap(&:mark_pages_as_not_deployed) }
+
+ it "does not schedule a PagesUpdateConfigurationWorker" do
+ expect(PagesUpdateConfigurationWorker).not_to receive(:perform_async).with(project.id)
+
+ create(:pages_domain, project: project)
+ end
end
context 'configuration updates when attributes change' do
@@ -611,6 +631,7 @@ RSpec.describe PagesDomain do
let!(:domain_with_expired_user_provided_certificate) do
create(:pages_domain, :with_expired_certificate)
end
+
let!(:domain_with_user_provided_certificate_and_auto_ssl) do
create(:pages_domain, auto_ssl_enabled: true)
end
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index a39a37b605f..9e80d0e0886 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -180,6 +180,18 @@ RSpec.describe PersonalAccessToken do
end
end
+ describe '.expired_today_and_not_notified' do
+ let_it_be(:active) { create(:personal_access_token) }
+ let_it_be(:expired_yesterday) { create(:personal_access_token, expires_at: Date.yesterday) }
+ let_it_be(:revoked_token) { create(:personal_access_token, expires_at: Date.current, revoked: true) }
+ let_it_be(:expired_today) { create(:personal_access_token, expires_at: Date.current) }
+ let_it_be(:expired_today_and_notified) { create(:personal_access_token, expires_at: Date.current, after_expiry_notification_delivered: true) }
+
+ it 'returns tokens that have expired today' do
+ expect(described_class.expired_today_and_not_notified).to contain_exactly(expired_today)
+ end
+ end
+
describe '.without_impersonation' do
let_it_be(:impersonation_token) { create(:personal_access_token, :impersonation) }
let_it_be(:personal_access_token) { create(:personal_access_token) }
diff --git a/spec/models/personal_snippet_spec.rb b/spec/models/personal_snippet_spec.rb
index 10d70fed1ee..234f6e4b4b5 100644
--- a/spec/models/personal_snippet_spec.rb
+++ b/spec/models/personal_snippet_spec.rb
@@ -21,7 +21,15 @@ RSpec.describe PersonalSnippet do
let_it_be(:container) { create(:personal_snippet, :repository) }
let(:stubbed_container) { build_stubbed(:personal_snippet) }
let(:expected_full_path) { "@snippets/#{container.id}" }
- let(:expected_web_url_path) { "snippets/#{container.id}" }
- let(:expected_repo_url_path) { expected_web_url_path }
+ let(:expected_web_url_path) { "-/snippets/#{container.id}" }
+ let(:expected_repo_url_path) { "snippets/#{container.id}" }
+ end
+
+ describe '#parent_user' do
+ it 'returns the snippet author' do
+ snippet = build(:personal_snippet)
+
+ expect(snippet.parent_user).to eq(snippet.author)
+ end
end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 831fd0dcbc3..bc6398de9a4 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -183,12 +183,9 @@ RSpec.describe PlanLimits do
ci_max_artifact_size_trace
ci_max_artifact_size_junit
ci_max_artifact_size_sast
- ci_max_artifact_size_dependency_scanning
- ci_max_artifact_size_container_scanning
ci_max_artifact_size_dast
ci_max_artifact_size_codequality
ci_max_artifact_size_license_management
- ci_max_artifact_size_license_scanning
ci_max_artifact_size_performance
ci_max_artifact_size_browser_performance
ci_max_artifact_size_load_performance
@@ -197,7 +194,6 @@ RSpec.describe PlanLimits do
ci_max_artifact_size_network_referee
ci_max_artifact_size_dotenv
ci_max_artifact_size_cobertura
- ci_max_artifact_size_terraform
ci_max_artifact_size_accessibility
ci_max_artifact_size_cluster_applications
ci_max_artifact_size_secret_detection
diff --git a/spec/models/product_analytics_event_spec.rb b/spec/models/product_analytics_event_spec.rb
index 6058df9fa13..afdb5b690f8 100644
--- a/spec/models/product_analytics_event_spec.rb
+++ b/spec/models/product_analytics_event_spec.rb
@@ -21,4 +21,18 @@ RSpec.describe ProductAnalyticsEvent, type: :model do
it { expect(described_class.timerange(7.days)).to match_array([event_1, event_2]) }
it { expect(described_class.timerange(30.days)).to match_array([event_1, event_2, event_3]) }
end
+
+ describe '.count_by_graph' do
+ let_it_be(:events) do
+ [
+ create(:product_analytics_event, platform: 'web'),
+ create(:product_analytics_event, platform: 'web'),
+ create(:product_analytics_event, platform: 'app'),
+ create(:product_analytics_event, platform: 'mobile', collector_tstamp: Time.zone.now - 10.days)
+ ]
+ end
+
+ it { expect(described_class.count_by_graph('platform', 7.days)).to eq({ 'app' => 1, 'web' => 2 }) }
+ it { expect(described_class.count_by_graph('platform', 30.days)).to eq({ 'app' => 1, 'mobile' => 1, 'web' => 2 }) }
+ end
end
diff --git a/spec/models/project_repository_storage_move_spec.rb b/spec/models/project_repository_storage_move_spec.rb
index 83711085c92..3e679c8af4d 100644
--- a/spec/models/project_repository_storage_move_spec.rb
+++ b/spec/models/project_repository_storage_move_spec.rb
@@ -74,9 +74,9 @@ RSpec.describe ProjectRepositoryStorageMove, type: :model do
context 'when started' do
subject(:storage_move) { create(:project_repository_storage_move, :started, project: project, destination_storage_name: 'test_second_storage') }
- context 'and transits to finished' do
+ context 'and transits to replicated' do
it 'sets the repository storage and marks the project as writable' do
- storage_move.finish!
+ storage_move.finish_replication!
expect(project.repository_storage).to eq('test_second_storage')
expect(project).not_to be_repository_read_only
diff --git a/spec/models/project_services/buildkite_service_spec.rb b/spec/models/project_services/buildkite_service_spec.rb
index ff717a59e7b..3d0c2cc1006 100644
--- a/spec/models/project_services/buildkite_service_spec.rb
+++ b/spec/models/project_services/buildkite_service_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe BuildkiteService, :use_clean_rails_memory_store_caching do
project: project,
properties: {
service_hook: true,
- project_url: 'https://buildkite.com/account-name/example-project',
+ project_url: 'https://buildkite.com/organization-name/example-pipeline',
token: 'secret-sauce-webhook-token:secret-sauce-status-token'
}
)
@@ -45,11 +45,27 @@ RSpec.describe BuildkiteService, :use_clean_rails_memory_store_caching do
end
end
+ describe '.supported_events' do
+ it 'supports push, merge_request, and tag_push events' do
+ expect(service.supported_events).to eq %w(push merge_request tag_push)
+ end
+ end
+
describe 'commits methods' do
before do
allow(project).to receive(:default_branch).and_return('default-brancho')
end
+ it 'always activates SSL verification after being saved' do
+ service.create_service_hook(enable_ssl_verification: false)
+
+ service.enable_ssl_verification = false
+ service.active = true
+
+ expect { service.save! }
+ .to change { service.service_hook.enable_ssl_verification }.from(false).to(true)
+ end
+
describe '#webhook_url' do
it 'returns the webhook url' do
expect(service.webhook_url).to eq(
@@ -69,7 +85,7 @@ RSpec.describe BuildkiteService, :use_clean_rails_memory_store_caching do
describe '#build_page' do
it 'returns the correct build page' do
expect(service.build_page('2ab7834c', nil)).to eq(
- 'https://buildkite.com/account-name/example-project/builds?commit=2ab7834c'
+ 'https://buildkite.com/organization-name/example-pipeline/builds?commit=2ab7834c'
)
end
end
diff --git a/spec/models/project_services/gitlab_issue_tracker_service_spec.rb b/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
deleted file mode 100644
index a6b7cb05836..00000000000
--- a/spec/models/project_services/gitlab_issue_tracker_service_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe GitlabIssueTrackerService do
- describe "Associations" do
- it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
- end
-
- describe 'Validations' do
- context 'when service is active' do
- subject { described_class.new(project: create(:project), active: true) }
-
- it { is_expected.to validate_presence_of(:issues_url) }
- it_behaves_like 'issue tracker service URL attribute', :issues_url
- end
-
- context 'when service is inactive' do
- subject { described_class.new(project: create(:project), active: false) }
-
- it { is_expected.not_to validate_presence_of(:issues_url) }
- end
- end
-
- describe 'project and issue urls' do
- let(:project) { create(:project) }
- let(:service) { project.create_gitlab_issue_tracker_service(active: true) }
-
- context 'with absolute urls' do
- before do
- allow(described_class).to receive(:default_url_options).and_return(script_name: "/gitlab/root")
- end
-
- it 'gives the correct path' do
- expect(service.project_url).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/-/issues")
- expect(service.new_issue_url).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/-/issues/new")
- expect(service.issue_url(432)).to eq("http://#{Gitlab.config.gitlab.host}/gitlab/root/#{project.full_path}/-/issues/432")
- end
- end
-
- context 'with relative urls' do
- before do
- allow(described_class).to receive(:default_url_options).and_return(script_name: "/gitlab/root")
- end
-
- it 'gives the correct path' do
- expect(service.issue_tracker_path).to eq("/gitlab/root/#{project.full_path}/-/issues")
- expect(service.new_issue_path).to eq("/gitlab/root/#{project.full_path}/-/issues/new")
- expect(service.issue_path(432)).to eq("/gitlab/root/#{project.full_path}/-/issues/432")
- end
- end
- end
-end
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index cfc2c920cd2..28bba893be4 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -121,6 +121,7 @@ RSpec.describe JiraService do
{ url: url, api_url: api_url, username: username, password: password,
jira_issue_transition_id: transition_id }
end
+
let(:data_params) do
{
url: url, api_url: api_url,
@@ -562,6 +563,7 @@ RSpec.describe JiraService do
password: password
)
end
+
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
subject { jira_service.create_cross_reference_note(jira_issue, resource, user) }
diff --git a/spec/models/project_services/jira_tracker_data_spec.rb b/spec/models/project_services/jira_tracker_data_spec.rb
index 9e38bced46c..f2e2fa65e93 100644
--- a/spec/models/project_services/jira_tracker_data_spec.rb
+++ b/spec/models/project_services/jira_tracker_data_spec.rb
@@ -8,4 +8,8 @@ RSpec.describe JiraTrackerData do
describe 'Associations' do
it { is_expected.to belong_to(:service) }
end
+
+ describe 'deployment_type' do
+ it { is_expected.to define_enum_for(:deployment_type).with_values([:unknown, :server, :cloud]).with_prefix(:deployment) }
+ end
end
diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb
index 610feb52827..53ab63ef030 100644
--- a/spec/models/project_services/microsoft_teams_service_spec.rb
+++ b/spec/models/project_services/microsoft_teams_service_spec.rb
@@ -121,6 +121,7 @@ RSpec.describe MicrosoftTeamsService do
message: "user created page: Awesome wiki_page"
}
end
+
let(:wiki_page) { create(:wiki_page, wiki: project.wiki, **opts) }
let(:wiki_page_sample_data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create') }
diff --git a/spec/models/project_snippet_spec.rb b/spec/models/project_snippet_spec.rb
index 464b9b1da84..3bcbf6b9e1b 100644
--- a/spec/models/project_snippet_spec.rb
+++ b/spec/models/project_snippet_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe ProjectSnippet do
let_it_be(:container) { create(:project_snippet, :repository) }
let(:stubbed_container) { build_stubbed(:project_snippet) }
let(:expected_full_path) { "#{container.project.full_path}/@snippets/#{container.id}" }
- let(:expected_web_url_path) { "#{container.project.full_path}/snippets/#{container.id}" }
- let(:expected_repo_url_path) { expected_web_url_path }
+ let(:expected_web_url_path) { "#{container.project.full_path}/-/snippets/#{container.id}" }
+ let(:expected_repo_url_path) { "#{container.project.full_path}/snippets/#{container.id}" }
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 8fdda241719..f589589af8f 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -61,7 +61,6 @@ RSpec.describe Project do
it { is_expected.to have_one(:youtrack_service) }
it { is_expected.to have_one(:custom_issue_tracker_service) }
it { is_expected.to have_one(:bugzilla_service) }
- it { is_expected.to have_one(:gitlab_issue_tracker_service) }
it { is_expected.to have_one(:external_wiki_service) }
it { is_expected.to have_one(:confluence_service) }
it { is_expected.to have_one(:project_feature) }
@@ -104,6 +103,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:clusters) }
it { is_expected.to have_many(:management_clusters).class_name('Clusters::Cluster') }
it { is_expected.to have_many(:kubernetes_namespaces) }
+ it { is_expected.to have_many(:cluster_agents).class_name('Clusters::Agent') }
it { is_expected.to have_many(:custom_attributes).class_name('ProjectCustomAttribute') }
it { is_expected.to have_many(:project_badges).class_name('ProjectBadge') }
it { is_expected.to have_many(:lfs_file_locks) }
@@ -122,6 +122,7 @@ RSpec.describe Project do
it { is_expected.to have_many(:reviews).inverse_of(:project) }
it { is_expected.to have_many(:packages).class_name('Packages::Package') }
it { is_expected.to have_many(:package_files).class_name('Packages::PackageFile') }
+ it { is_expected.to have_many(:pipeline_artifacts) }
it_behaves_like 'model with repository' do
let_it_be(:container) { create(:project, :repository, path: 'somewhere') }
@@ -400,6 +401,7 @@ RSpec.describe Project do
create(:project,
pending_delete: true)
end
+
let(:new_project) do
build(:project,
name: project_pending_deletion.name,
@@ -474,6 +476,46 @@ RSpec.describe Project do
end
end
+ describe '#has_packages?' do
+ let(:project) { create(:project, :public) }
+
+ subject { project.has_packages?(package_type) }
+
+ shared_examples 'returning true examples' do
+ let!(:package) { create("#{package_type}_package", project: project) }
+
+ it { is_expected.to be true }
+ end
+
+ shared_examples 'returning false examples' do
+ it { is_expected.to be false }
+ end
+
+ context 'with maven packages' do
+ it_behaves_like 'returning true examples' do
+ let(:package_type) { :maven }
+ end
+ end
+
+ context 'with npm packages' do
+ it_behaves_like 'returning true examples' do
+ let(:package_type) { :npm }
+ end
+ end
+
+ context 'with conan packages' do
+ it_behaves_like 'returning true examples' do
+ let(:package_type) { :conan }
+ end
+ end
+
+ context 'with no package type' do
+ it_behaves_like 'returning false examples' do
+ let(:package_type) { nil }
+ end
+ end
+ end
+
describe '#ci_pipelines' do
let(:project) { create(:project) }
@@ -638,6 +680,12 @@ RSpec.describe Project do
end
end
end
+
+ context 'when argument is a user' do
+ it 'returns full path to the project' do
+ expect(project.to_reference_base(owner)).to eq 'sample-namespace/sample-project'
+ end
+ end
end
describe '#to_human_reference' do
@@ -1042,6 +1090,30 @@ RSpec.describe Project do
end
end
+ describe '#default_owner' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:namespace) { create(:namespace, owner: owner) }
+
+ context 'the project does not have a group' do
+ let(:project) { build(:project, namespace: namespace) }
+
+ it 'is the namespace owner' do
+ expect(project.default_owner).to eq(owner)
+ end
+ end
+
+ context 'the project is in a group' do
+ let(:group) { build(:group) }
+ let(:project) { build(:project, group: group, namespace: namespace) }
+
+ it 'is the group owner' do
+ allow(group).to receive(:default_owner).and_return(Object.new)
+
+ expect(project.default_owner).to eq(group.default_owner)
+ end
+ end
+ end
+
describe '#external_wiki' do
let(:project) { create(:project) }
@@ -1408,16 +1480,69 @@ RSpec.describe Project do
end
describe '#service_desk_address' do
- let_it_be(:project) { create(:project, service_desk_enabled: true) }
+ let_it_be(:project, reload: true) { create(:project, service_desk_enabled: true) }
- before do
- allow(Gitlab::ServiceDesk).to receive(:enabled?).and_return(true)
- allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
- allow(Gitlab.config.incoming_email).to receive(:address).and_return("test+%{key}@mail.com")
+ subject { project.service_desk_address }
+
+ shared_examples 'with incoming email address' do
+ context 'when incoming email is enabled' do
+ before do
+ config = double(enabled: true, address: 'test+%{key}@mail.com')
+ allow(::Gitlab.config).to receive(:incoming_email).and_return(config)
+ end
+
+ it 'uses project full path as service desk address key' do
+ expect(project.service_desk_address).to eq("test+#{project.full_path_slug}-#{project.project_id}-issue-@mail.com")
+ end
+ end
+
+ context 'when incoming email is disabled' do
+ before do
+ config = double(enabled: false)
+ allow(::Gitlab.config).to receive(:incoming_email).and_return(config)
+ end
+
+ it 'returns nil' do
+ expect(project.service_desk_address).to be_nil
+ end
+ end
+ end
+
+ context 'when service_desk_email is disabled' do
+ before do
+ allow(::Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(false)
+ end
+
+ it_behaves_like 'with incoming email address'
end
- it 'uses project full path as service desk address key' do
- expect(project.service_desk_address).to eq("test+#{project.full_path_slug}-#{project.project_id}-issue-@mail.com")
+ context 'when service_desk_email is enabled' do
+ before do
+ config = double(enabled: true, address: 'foo+%{key}@bar.com')
+ allow(::Gitlab::ServiceDeskEmail).to receive(:config).and_return(config)
+ end
+
+ context 'when service_desk_custom_address flag is enabled' do
+ before do
+ stub_feature_flags(service_desk_custom_address: true)
+ end
+
+ it 'returns custom address when project_key is set' do
+ create(:service_desk_setting, project: project, project_key: 'key1')
+
+ expect(subject).to eq("foo+#{project.full_path_slug}-key1@bar.com")
+ end
+
+ it_behaves_like 'with incoming email address'
+ end
+
+ context 'when service_desk_custom_address flag is disabled' do
+ before do
+ stub_feature_flags(service_desk_custom_address: false)
+ end
+
+ it_behaves_like 'with incoming email address'
+ end
end
end
@@ -1657,9 +1782,9 @@ RSpec.describe Project do
subject { project.pages_deployed? }
- context 'if public folder does exist' do
+ context 'if pages are deployed' do
before do
- allow(Dir).to receive(:exist?).with(project.public_pages_path).and_return(true)
+ project.pages_metadatum.update_column(:deployed, true)
end
it { is_expected.to be_truthy }
@@ -2221,6 +2346,7 @@ RSpec.describe Project do
create(:ci_empty_pipeline, project: project, sha: project.commit.id,
ref: project.default_branch)
end
+
let!(:pipeline_for_second_branch) do
create(:ci_empty_pipeline, project: project, sha: second_branch.target,
ref: second_branch.name)
@@ -3488,6 +3614,7 @@ RSpec.describe Project do
public: '\\1'
MAP
end
+
let(:sha) { project.commit.id }
context 'when there is a route map' do
@@ -4085,7 +4212,6 @@ RSpec.describe Project do
end
it 'removes the pages directory and marks the project as not having pages deployed' do
- expect_any_instance_of(Projects::UpdatePagesConfigurationService).to receive(:execute)
expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project).and_return(true)
expect(PagesWorker).to receive(:perform_in).with(5.minutes, :remove, namespace.full_path, anything)
@@ -5105,6 +5231,7 @@ RSpec.describe Project do
allow_collaboration: true
)
end
+
let!(:merge_request) do
create(
:merge_request,
@@ -5455,6 +5582,32 @@ RSpec.describe Project do
end
end
+ describe '.for_repository_storage' do
+ it 'returns the projects for a given repository storage' do
+ stub_storage_settings('test_second_storage' => {
+ 'path' => TestEnv::SECOND_STORAGE_PATH,
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address
+ })
+ expected_project = create(:project, repository_storage: 'default')
+ create(:project, repository_storage: 'test_second_storage')
+
+ expect(described_class.for_repository_storage('default')).to eq([expected_project])
+ end
+ end
+
+ describe '.excluding_repository_storage' do
+ it 'returns the projects excluding the given repository storage' do
+ stub_storage_settings('test_second_storage' => {
+ 'path' => TestEnv::SECOND_STORAGE_PATH,
+ 'gitaly_address' => Gitlab.config.repositories.storages.default.gitaly_address
+ })
+ expected_project = create(:project, repository_storage: 'test_second_storage')
+ create(:project, repository_storage: 'default')
+
+ expect(described_class.excluding_repository_storage('default')).to eq([expected_project])
+ end
+ end
+
describe '.deployments' do
subject { project.deployments }
@@ -6154,6 +6307,48 @@ RSpec.describe Project do
end
end
+ describe '#has_packages?' do
+ let(:project) { create(:project, :public) }
+
+ subject { project.has_packages?(package_type) }
+
+ shared_examples 'has_package' do
+ context 'package of package_type exists' do
+ let!(:package) { create("#{package_type}_package", project: project) }
+
+ it { is_expected.to be true }
+ end
+
+ context 'package of package_type does not exist' do
+ it { is_expected.to be false }
+ end
+ end
+
+ context 'with maven packages' do
+ it_behaves_like 'has_package' do
+ let(:package_type) { :maven }
+ end
+ end
+
+ context 'with npm packages' do
+ it_behaves_like 'has_package' do
+ let(:package_type) { :npm }
+ end
+ end
+
+ context 'with conan packages' do
+ it_behaves_like 'has_package' do
+ let(:package_type) { :conan }
+ end
+ end
+
+ context 'calling has_package? with nil' do
+ let(:package_type) { nil }
+
+ it { is_expected.to be false }
+ end
+ end
+
describe '#environments_for_scope' do
let_it_be(:project, reload: true) { create(:project) }
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 3659e6b973e..5f66de3a63c 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -328,8 +328,8 @@ RSpec.describe ProjectStatistics do
it 'increases also storage size by that amount' do
expect { described_class.increment_statistic(project.id, stat, 20) }
- .to change { statistics.reload.storage_size }
- .by(20)
+ .to change { statistics.reload.storage_size }
+ .by(20)
end
end
diff --git a/spec/models/prometheus_alert_spec.rb b/spec/models/prometheus_alert_spec.rb
index 7169a34d96f..8e517e1764e 100644
--- a/spec/models/prometheus_alert_spec.rb
+++ b/spec/models/prometheus_alert_spec.rb
@@ -50,6 +50,8 @@ RSpec.describe PrometheusAlert do
it { is_expected.to validate_presence_of(:environment) }
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:prometheus_metric) }
+ it { is_expected.to validate_presence_of(:operator) }
+ it { is_expected.to validate_presence_of(:threshold) }
context 'when environment and metric belongs same project' do
it { is_expected.to be_valid }
@@ -74,6 +76,34 @@ RSpec.describe PrometheusAlert do
end
end
+ describe 'runbook validations' do
+ it 'disallows invalid urls' do
+ unsafe_url = %{https://replaceme.com/'><script>alert(document.cookie)</script>}
+ non_ascii_url = 'http://gitlab.com/user/project1/wiki/something€'
+ excessively_long_url = 'https://gitla' + 'b' * 1024 + '.com'
+
+ is_expected.not_to allow_values(
+ unsafe_url,
+ non_ascii_url,
+ excessively_long_url
+ ).for(:runbook_url)
+ end
+
+ it 'allows valid urls' do
+ external_url = 'http://runbook.gitlab.com/'
+ internal_url = 'http://192.168.1.1'
+ blank_url = ''
+ nil_url = nil
+
+ is_expected.to allow_value(
+ external_url,
+ internal_url,
+ blank_url,
+ nil_url
+ ).for(:runbook_url)
+ end
+ end
+
describe '#full_query' do
before do
subject.operator = "gt"
@@ -91,6 +121,7 @@ RSpec.describe PrometheusAlert do
subject.operator = "gt"
subject.threshold = 1
subject.prometheus_metric = metric
+ subject.runbook_url = 'runbook'
end
it 'returns the params of the prometheus alert' do
@@ -102,7 +133,11 @@ RSpec.describe PrometheusAlert do
"gitlab" => "hook",
"gitlab_alert_id" => metric.id,
"gitlab_prometheus_alert_id" => subject.id
- })
+ },
+ "annotations" => {
+ "runbook" => "runbook"
+ }
+ )
end
end
end
diff --git a/spec/models/prometheus_metric_spec.rb b/spec/models/prometheus_metric_spec.rb
index f284102b4a9..9588167bbcc 100644
--- a/spec/models/prometheus_metric_spec.rb
+++ b/spec/models/prometheus_metric_spec.rb
@@ -138,10 +138,6 @@ RSpec.describe PrometheusMetric do
expect(subject.to_query_metric.required_metrics).to eq([])
end
- it 'queryable metric has weight 0' do
- expect(subject.to_query_metric.weight).to eq(0)
- end
-
it 'queryable metrics has query description' do
queries = [
{
diff --git a/spec/models/raw_usage_data_spec.rb b/spec/models/raw_usage_data_spec.rb
new file mode 100644
index 00000000000..c10db63da56
--- /dev/null
+++ b/spec/models/raw_usage_data_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RawUsageData do
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:payload) }
+ it { is_expected.to validate_presence_of(:recorded_at) }
+
+ context 'uniqueness validation' do
+ let!(:existing_record) { create(:raw_usage_data) }
+
+ it { is_expected.to validate_uniqueness_of(:recorded_at) }
+ end
+
+ describe '#update_sent_at!' do
+ let(:raw_usage_data) { create(:raw_usage_data) }
+
+ context 'with save_raw_usage_data feature enabled' do
+ before do
+ stub_feature_flags(save_raw_usage_data: true)
+ end
+
+ it 'updates sent_at' do
+ raw_usage_data.update_sent_at!
+
+ expect(raw_usage_data.sent_at).not_to be_nil
+ end
+ end
+
+ context 'with save_raw_usage_data feature disabled' do
+ before do
+ stub_feature_flags(save_raw_usage_data: false)
+ end
+
+ it 'does not update sent_at' do
+ raw_usage_data.update_sent_at!
+
+ expect(raw_usage_data.sent_at).to be_nil
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 716e7dc786e..fea15ea00c8 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -73,6 +73,22 @@ RSpec.describe Release do
end
end
+ describe '.create' do
+ it "fills released_at using created_at if it's not set" do
+ release = described_class.create(project: project, author: user)
+
+ expect(release.released_at).to eq(release.created_at)
+ end
+
+ it "does not change released_at if it's set explicitly" do
+ released_at = Time.zone.parse('2018-10-20T18:00:00Z')
+
+ release = described_class.create(project: project, author: user, released_at: released_at)
+
+ expect(release.released_at).to eq(released_at)
+ end
+ end
+
describe '#sources' do
subject { release.sources }
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 964cc5a13ca..a6b79e55f02 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -587,15 +587,19 @@ RSpec.describe Repository do
end
it "is expired when the branches caches are expired" do
- expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
+ expect(cache).to receive(:delete) do |*args|
+ expect(args).to include(:merged_branch_names)
+ end
- repository.send(:expire_branches_cache)
+ repository.expire_branches_cache
end
it "is expired when the repository caches are expired" do
- expect(cache).to receive(:delete).with(:merged_branch_names).at_least(:once)
+ expect(cache).to receive(:delete) do |*args|
+ expect(args).to include(:merged_branch_names)
+ end
- repository.send(:expire_all_method_caches)
+ repository.expire_all_method_caches
end
end
@@ -1245,6 +1249,32 @@ RSpec.describe Repository do
end
end
+ describe '#has_ambiguous_refs?' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:branch_names, :tag_names, :result) do
+ nil | nil | false
+ %w() | %w() | false
+ %w(a b) | %w() | false
+ %w() | %w(c d) | false
+ %w(a b) | %w(c d) | false
+ %w(a/b) | %w(c/d) | false
+ %w(a b) | %w(c d a/z) | true
+ %w(a b c/z) | %w(c d) | true
+ %w(a/b/z) | %w(a/b) | false # we only consider refs ambiguous before the first slash
+ %w(a/b/z) | %w(a/b a) | true
+ end
+
+ with_them do
+ it do
+ allow(repository).to receive(:branch_names).and_return(branch_names)
+ allow(repository).to receive(:tag_names).and_return(tag_names)
+
+ expect(repository.has_ambiguous_refs?).to eq(result)
+ end
+ end
+ end
+
describe '#expand_ref' do
let(:ref) { 'ref' }
@@ -1926,8 +1956,9 @@ RSpec.describe Repository do
:has_visible_content?,
:issue_template_names,
:merge_request_template_names,
- :metrics_dashboard_paths,
- :xcode_project?
+ :user_defined_metrics_dashboard_paths,
+ :xcode_project?,
+ :has_ambiguous_refs?
])
repository.after_change_head
@@ -2072,7 +2103,7 @@ RSpec.describe Repository do
describe '#expire_branches_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(branch_names merged_branch_names branch_count has_visible_content?))
+ .with(%i(branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?))
.and_call_original
repository.expire_branches_cache
@@ -2082,7 +2113,7 @@ RSpec.describe Repository do
describe '#expire_tags_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
- .with(%i(tag_names tag_count))
+ .with(%i(tag_names tag_count has_ambiguous_refs?))
.and_call_original
repository.expire_tags_cache
@@ -2673,6 +2704,7 @@ RSpec.describe Repository do
build(:commit, author: author_c),
build(:commit, author: author_c)]
end
+
let(:order_by) { nil }
let(:sort) { nil }
diff --git a/spec/models/resource_iteration_event_spec.rb b/spec/models/resource_iteration_event_spec.rb
new file mode 100644
index 00000000000..fe1310d7bf1
--- /dev/null
+++ b/spec/models/resource_iteration_event_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceIterationEvent, type: :model do
+ it_behaves_like 'a resource event'
+ it_behaves_like 'a resource event for issues'
+ it_behaves_like 'a resource event for merge requests'
+
+ it_behaves_like 'having unique enum values'
+ it_behaves_like 'timebox resource event validations'
+ it_behaves_like 'timebox resource event actions'
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:iteration) }
+ end
+end
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
index 76ffb358d80..0a5292b2d16 100644
--- a/spec/models/resource_milestone_event_spec.rb
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -8,77 +8,14 @@ RSpec.describe ResourceMilestoneEvent, type: :model do
it_behaves_like 'a resource event for merge requests'
it_behaves_like 'having unique enum values'
+ it_behaves_like 'timebox resource event validations'
+ it_behaves_like 'timebox resource event states'
+ it_behaves_like 'timebox resource event actions'
describe 'associations' do
it { is_expected.to belong_to(:milestone) }
end
- describe 'validations' do
- context 'when issue and merge_request are both nil' do
- subject { build(described_class.name.underscore.to_sym, issue: nil, merge_request: nil) }
-
- it { is_expected.not_to be_valid }
- end
-
- context 'when issue and merge_request are both set' do
- subject { build(described_class.name.underscore.to_sym, issue: build(:issue), merge_request: build(:merge_request)) }
-
- it { is_expected.not_to be_valid }
- end
-
- context 'when issue is set' do
- subject { create(described_class.name.underscore.to_sym, issue: create(:issue), merge_request: nil) }
-
- it { is_expected.to be_valid }
- end
-
- context 'when merge_request is set' do
- subject { create(described_class.name.underscore.to_sym, issue: nil, merge_request: create(:merge_request)) }
-
- it { is_expected.to be_valid }
- end
- end
-
- describe 'states' do
- [Issue, MergeRequest].each do |klass|
- klass.available_states.each do |state|
- it "supports state #{state.first} for #{klass.name.underscore}" do
- model = create(klass.name.underscore, state: state[0])
- key = model.class.name.underscore
- event = build(described_class.name.underscore.to_sym, key => model, state: model.state)
-
- expect(event.state).to eq(state[0])
- end
- end
- end
- end
-
- shared_examples 'a milestone action queryable resource event' do |expected_results_for_actions|
- [Issue, MergeRequest].each do |klass|
- expected_results_for_actions.each do |action, expected_result|
- it "is #{expected_result} for action #{action} on #{klass.name.underscore}" do
- model = create(klass.name.underscore)
- key = model.class.name.underscore
- event = build(described_class.name.underscore.to_sym, key => model, action: action)
-
- expect(event.send(query_method)).to eq(expected_result)
- end
- end
- end
- end
-
- describe '#added?' do
- it_behaves_like 'a milestone action queryable resource event', { add: true, remove: false } do
- let(:query_method) { :add? }
- end
- end
-
- describe '#removed?' do
- it_behaves_like 'a milestone action queryable resource event', { add: false, remove: true } do
- let(:query_method) { :remove? }
- end
- end
-
describe '#milestone_title' do
let(:milestone) { create(:milestone, title: 'v2.3') }
let(:event) { create(:resource_milestone_event, milestone: milestone) }
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 75bbb074526..c4a9c0329c7 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -11,30 +11,31 @@ RSpec.describe Service do
end
describe 'validations' do
- it { is_expected.to validate_presence_of(:type) }
-
- it 'validates presence of project_id if not template', :aggregate_failures do
- expect(build(:service, project_id: nil, template: true)).to be_valid
- expect(build(:service, project_id: nil, template: false)).to be_invalid
- end
+ using RSpec::Parameterized::TableSyntax
- it 'validates presence of project_id if not instance', :aggregate_failures do
- expect(build(:service, project_id: nil, instance: true)).to be_valid
- expect(build(:service, project_id: nil, instance: false)).to be_invalid
- end
+ let(:group) { create(:group) }
+ let(:project) { create(:project) }
- it 'validates absence of project_id if instance', :aggregate_failures do
- expect(build(:service, project_id: nil, instance: true)).to be_valid
- expect(build(:service, instance: true)).to be_invalid
- end
+ it { is_expected.to validate_presence_of(:type) }
- it 'validates absence of project_id if template', :aggregate_failures do
- expect(build(:service, template: true)).to validate_absence_of(:project_id)
- expect(build(:service, template: false)).not_to validate_absence_of(:project_id)
+ where(:project_id, :group_id, :template, :instance, :valid) do
+ 1 | nil | false | false | true
+ nil | 1 | false | false | true
+ nil | nil | true | false | true
+ nil | nil | false | true | true
+ nil | nil | false | false | false
+ nil | nil | true | true | false
+ 1 | 1 | false | false | false
+ 1 | nil | true | false | false
+ 1 | nil | false | true | false
+ nil | 1 | true | false | false
+ nil | 1 | false | true | false
end
- it 'validates service is template or instance' do
- expect(build(:service, project_id: nil, template: true, instance: true)).to be_invalid
+ with_them do
+ it 'validates the service' do
+ expect(build(:service, project_id: project_id, group_id: group_id, template: template, instance: instance).valid?).to eq(valid)
+ end
end
context 'with an existing service template' do
@@ -58,12 +59,15 @@ RSpec.describe Service do
end
it 'validates uniqueness of type and project_id on create' do
- project = create(:project)
-
expect(create(:service, project: project, type: 'Service')).to be_valid
expect(build(:service, project: project, type: 'Service').valid?(:create)).to eq(false)
expect(build(:service, project: project, type: 'Service').valid?(:update)).to eq(true)
end
+
+ it 'validates uniqueness of type and group_id' do
+ expect(create(:service, group_id: group.id, project_id: nil, type: 'Service')).to be_valid
+ expect(build(:service, group_id: group.id, project_id: nil, type: 'Service')).to be_invalid
+ end
end
describe 'Scopes' do
@@ -535,7 +539,7 @@ RSpec.describe Service do
describe 'initialize service with no properties' do
let(:service) do
- GitlabIssueTrackerService.create(
+ BugzillaService.create(
project: create(:project),
project_url: 'http://gitlab.example.com'
)
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 8c25d713c0a..0f5e0bfc75c 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -102,6 +102,7 @@ RSpec.describe SnippetRepository do
{ action: :move }.merge(move_file),
{ action: :update }.merge(update_file)]
end
+
let(:repo) { double }
before do
diff --git a/spec/models/suggestion_spec.rb b/spec/models/suggestion_spec.rb
index 6c30bc39c1d..e88fc13ecee 100644
--- a/spec/models/suggestion_spec.rb
+++ b/spec/models/suggestion_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Suggestion do
end
context 'when inapplicable_reason is not nil' do
- let(:inapplicable_reason) { :applied }
+ let(:inapplicable_reason) { "Can't apply this suggestion." }
it { is_expected.to be_falsey }
end
@@ -77,7 +77,7 @@ RSpec.describe Suggestion do
context 'when suggestion is already applied' do
let(:suggestion) { build(:suggestion, :applied, note: note) }
- it { is_expected.to eq(:applied) }
+ it { is_expected.to eq("Can't apply this suggestion.") }
end
context 'when merge request was merged' do
@@ -85,7 +85,7 @@ RSpec.describe Suggestion do
merge_request.mark_as_merged!
end
- it { is_expected.to eq(:merge_request_merged) }
+ it { is_expected.to eq("This merge request was merged. To apply this suggestion, edit this file directly.") }
end
context 'when merge request is closed' do
@@ -93,7 +93,7 @@ RSpec.describe Suggestion do
merge_request.close!
end
- it { is_expected.to eq(:merge_request_closed) }
+ it { is_expected.to eq("This merge request is closed. To apply this suggestion, edit this file directly.") }
end
context 'when source branch is deleted' do
@@ -101,23 +101,51 @@ RSpec.describe Suggestion do
merge_request.project.repository.rm_branch(merge_request.author, merge_request.source_branch)
end
- it { is_expected.to eq(:source_branch_deleted) }
+ it { is_expected.to eq("Can't apply as the source branch was deleted.") }
end
- context 'when content is outdated' do
- before do
- allow(suggestion).to receive(:outdated?).and_return(true)
+ context 'when outdated' do
+ shared_examples_for 'outdated suggestion' do
+ before do
+ allow(suggestion).to receive(:single_line?).and_return(single_line)
+ end
+
+ context 'and suggestion is for a single line' do
+ let(:single_line) { true }
+
+ it { is_expected.to eq("Can't apply as this line was changed in a more recent version.") }
+ end
+
+ context 'and suggestion is for multiple lines' do
+ let(:single_line) { false }
+
+ it { is_expected.to eq("Can't apply as these lines were changed in a more recent version.") }
+ end
end
- it { is_expected.to eq(:outdated) }
+ context 'and content is outdated' do
+ before do
+ allow(suggestion).to receive(:outdated?).and_return(true)
+ end
+
+ it_behaves_like 'outdated suggestion'
+ end
+
+ context 'and note is outdated' do
+ before do
+ allow(note).to receive(:active?).and_return(false)
+ end
+
+ it_behaves_like 'outdated suggestion'
+ end
end
- context 'when note is outdated' do
+ context 'when suggestion has the same content' do
before do
- allow(note).to receive(:active?).and_return(false)
+ allow(suggestion).to receive(:different_content?).and_return(false)
end
- it { is_expected.to eq(:outdated) }
+ it { is_expected.to eq("This suggestion already matches its content.") }
end
context 'when applicable' do
diff --git a/spec/models/terraform/state_spec.rb b/spec/models/terraform/state_spec.rb
index 00e67ad70db..68bb86bfa49 100644
--- a/spec/models/terraform/state_spec.rb
+++ b/spec/models/terraform/state_spec.rb
@@ -45,9 +45,7 @@ RSpec.describe Terraform::State do
describe '#update_file_store' do
context 'when file is stored in object storage' do
- it 'sets file_store to remote' do
- expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
- end
+ it_behaves_like 'mounted file in object store'
end
context 'when file is stored locally' do
@@ -55,9 +53,7 @@ RSpec.describe Terraform::State do
stub_terraform_state_object_storage(Terraform::StateUploader, enabled: false)
end
- it 'sets file_store to local' do
- expect(subject.file_store).to eq(ObjectStorage::Store::LOCAL)
- end
+ it_behaves_like 'mounted file in local store'
end
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index fa2e4b63648..f9b819e22cd 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -76,6 +76,7 @@ RSpec.describe User do
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys).dependent(:nullify) }
+ it { is_expected.to have_many(:group_deploy_keys) }
it { is_expected.to have_many(:events).dependent(:delete_all) }
it { is_expected.to have_many(:issues).dependent(:destroy) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
@@ -241,6 +242,22 @@ RSpec.describe User do
it { is_expected.to validate_length_of(:last_name).is_at_most(127) }
end
+ describe 'preferred_language' do
+ context 'when its value is nil in the database' do
+ let(:user) { build(:user, preferred_language: nil) }
+
+ it 'falls back to I18n.default_locale when empty in the database' do
+ expect(user.preferred_language).to eq I18n.default_locale.to_s
+ end
+
+ it 'falls back to english when I18n.default_locale is not an available language' do
+ I18n.default_locale = :kl
+
+ expect(user.preferred_language).to eq 'en'
+ end
+ end
+ end
+
describe 'username' do
it 'validates presence' do
expect(subject).to validate_presence_of(:username)
@@ -839,6 +856,24 @@ RSpec.describe User do
end
end
+ describe '.with_personal_access_tokens_expired_today' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:expired_today) { create(:personal_access_token, user: user1, expires_at: Date.current) }
+
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:revoked_token) { create(:personal_access_token, user: user2, expires_at: Date.current, revoked: true) }
+
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:impersonated_token) { create(:personal_access_token, user: user3, expires_at: Date.current, impersonation: true) }
+
+ let_it_be(:user4) { create(:user) }
+ let_it_be(:already_notified) { create(:personal_access_token, user: user4, expires_at: Date.current, after_expiry_notification_delivered: true) }
+
+ it 'returns users whose token has expired today' do
+ expect(described_class.with_personal_access_tokens_expired_today).to contain_exactly(user1)
+ end
+ end
+
describe '.active_without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }
diff --git a/spec/models/wiki_page_spec.rb b/spec/models/wiki_page_spec.rb
index a2ca6441f28..aa8b9ce58b9 100644
--- a/spec/models/wiki_page_spec.rb
+++ b/spec/models/wiki_page_spec.rb
@@ -7,7 +7,11 @@ RSpec.describe WikiPage do
let(:container) { create(:project, :wiki_repo) }
let(:wiki) { Wiki.for_container(container, user) }
let(:new_page) { build(:wiki_page, wiki: wiki, title: 'test page', content: 'test content') }
- let(:existing_page) { create(:wiki_page, wiki: wiki, title: 'test page', content: 'test content', message: 'test commit') }
+
+ let(:existing_page) do
+ create(:wiki_page, wiki: wiki, title: 'test page', content: 'test content', message: 'test commit')
+ wiki.find_page('test page')
+ end
subject { new_page }
@@ -45,9 +49,11 @@ RSpec.describe WikiPage do
let(:dir_1) do
WikiDirectory.new('dir_1', [wiki.find_page('dir_1/page_2')])
end
+
let(:dir_1_1) do
WikiDirectory.new('dir_1/dir_1_1', [wiki.find_page('dir_1/dir_1_1/page_3')])
end
+
let(:dir_2) do
pages = [wiki.find_page('dir_2/page_5'),
wiki.find_page('dir_2/page_4')]
@@ -257,14 +263,68 @@ RSpec.describe WikiPage do
subject.attributes.delete(:title)
expect(subject).not_to be_valid
- expect(subject.errors.keys).to contain_exactly(:title)
+ expect(subject.errors.messages).to eq(title: ["can't be blank"])
end
it "validates presence of content" do
subject.attributes.delete(:content)
expect(subject).not_to be_valid
- expect(subject.errors.keys).to contain_exactly(:content)
+ expect(subject.errors.messages).to eq(content: ["can't be blank"])
+ end
+
+ describe '#validate_content_size_limit' do
+ context 'with a new page' do
+ before do
+ stub_application_setting(wiki_page_max_content_bytes: 10)
+ end
+
+ it 'accepts content below the limit' do
+ subject.attributes[:content] = 'a' * 10
+
+ expect(subject).to be_valid
+ end
+
+ it 'rejects content exceeding the limit' do
+ subject.attributes[:content] = 'a' * 11
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages).to eq(
+ content: ['is too long (11 Bytes). The maximum size is 10 Bytes.']
+ )
+ end
+
+ it 'counts content size in bytes rather than characters' do
+ subject.attributes[:content] = '💩💩💩'
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages).to eq(
+ content: ['is too long (12 Bytes). The maximum size is 10 Bytes.']
+ )
+ end
+ end
+
+ context 'with an existing page exceeding the limit' do
+ subject { existing_page }
+
+ before do
+ subject
+ stub_application_setting(wiki_page_max_content_bytes: 11)
+ end
+
+ it 'accepts content when it has not changed' do
+ expect(subject).to be_valid
+ end
+
+ it 'rejects content when it has changed' do
+ subject.attributes[:content] = 'a' * 12
+
+ expect(subject).not_to be_valid
+ expect(subject.errors.messages).to eq(
+ content: ['is too long (12 Bytes). The maximum size is 11 Bytes.']
+ )
+ end
+ end
end
describe '#validate_path_limits' do
@@ -702,6 +762,58 @@ RSpec.describe WikiPage do
end
end
+ describe '#content_changed?' do
+ context 'with a new page' do
+ subject { new_page }
+
+ it 'returns true if content is set' do
+ subject.attributes[:content] = 'new'
+
+ expect(subject.content_changed?).to be(true)
+ end
+
+ it 'returns false if content is blank' do
+ subject.attributes[:content] = ' '
+
+ expect(subject.content_changed?).to be(false)
+ end
+ end
+
+ context 'with an existing page' do
+ subject { existing_page }
+
+ it 'returns false' do
+ expect(subject.content_changed?).to be(false)
+ end
+
+ it 'returns false if content is set to the same value' do
+ subject.attributes[:content] = 'test content'
+
+ expect(subject.content_changed?).to be(false)
+ end
+
+ it 'returns true if content is changed' do
+ subject.attributes[:content] = 'new'
+
+ expect(subject.content_changed?).to be(true)
+ end
+
+ it 'returns true if content is changed to a blank string' do
+ subject.attributes[:content] = ' '
+
+ expect(subject.content_changed?).to be(true)
+ end
+
+ it 'returns false if only the newline format has changed' do
+ expect(subject.page).to receive(:text_data).and_return("foo\nbar")
+
+ subject.attributes[:content] = "foo\r\nbar"
+
+ expect(subject.content_changed?).to be(false)
+ end
+ end
+ end
+
describe '#path' do
it 'returns the path when persisted' do
expect(existing_page.path).to eq('test-page.md')
diff --git a/spec/models/wiki_spec.rb b/spec/models/wiki_spec.rb
new file mode 100644
index 00000000000..8dd510a0b98
--- /dev/null
+++ b/spec/models/wiki_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Wiki do
+ describe '.new' do
+ it 'verifies that the user is a User' do
+ expect { described_class.new(double, 1) }.to raise_error(ArgumentError)
+ expect { described_class.new(double, build(:group)) }.to raise_error(ArgumentError)
+ expect { described_class.new(double, build(:user)) }.not_to raise_error
+ expect { described_class.new(double, nil) }.not_to raise_error
+ end
+ end
+end
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index d2547338855..098efd7daa6 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe Ci::BuildPolicy do
create(:protected_tag, :no_one_can_create,
name: build.ref, project: project)
- build.update(tag: true)
+ build.update!(tag: true)
end
it 'does not include ability to update build' do
@@ -247,6 +247,36 @@ RSpec.describe Ci::BuildPolicy do
it { expect(policy).to be_disallowed :erase_build }
end
end
+
+ context 'when an admin erases a build', :enable_admin_mode do
+ let(:owner) { create(:user) }
+
+ before do
+ user.update!(admin: true)
+ end
+
+ context 'when the build was created for a protected branch' do
+ before do
+ create(:protected_branch, :developers_can_push,
+ name: build.ref, project: project)
+ end
+
+ it { expect(policy).to be_allowed :erase_build }
+ end
+
+ context 'when the build was created for a protected tag' do
+ before do
+ create(:protected_tag, :developers_can_create,
+ name: build.ref, project: project)
+ end
+
+ it { expect(policy).to be_allowed :erase_build }
+ end
+
+ context 'when the build was created for an unprotected ref' do
+ it { expect(policy).to be_allowed :erase_build }
+ end
+ end
end
end
diff --git a/spec/policies/ci/pipeline_policy_spec.rb b/spec/policies/ci/pipeline_policy_spec.rb
index fcd96bc6653..9a65823c950 100644
--- a/spec/policies/ci/pipeline_policy_spec.rb
+++ b/spec/policies/ci/pipeline_policy_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Ci::PipelinePolicy, :models do
create(:protected_tag, :no_one_can_create,
name: pipeline.ref, project: project)
- pipeline.update(tag: true)
+ pipeline.update!(tag: true)
end
it 'does not include ability to update pipeline' do
diff --git a/spec/policies/ci/pipeline_schedule_policy_spec.rb b/spec/policies/ci/pipeline_schedule_policy_spec.rb
index b455384d17a..1e36f455f6f 100644
--- a/spec/policies/ci/pipeline_schedule_policy_spec.rb
+++ b/spec/policies/ci/pipeline_schedule_policy_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models do
let(:tag) { 'v1.0.0' }
before do
- pipeline_schedule.update(ref: tag)
+ pipeline_schedule.update!(ref: tag)
create(:protected_tag, :no_one_can_create,
name: pipeline_schedule.ref, project: project)
@@ -69,7 +69,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models do
describe 'rules for owner of schedule' do
before do
project.add_developer(user)
- pipeline_schedule.update(owner: user)
+ pipeline_schedule.update!(owner: user)
end
it 'includes abilities to do all operations on pipeline schedule' do
@@ -97,7 +97,7 @@ RSpec.describe Ci::PipelineSchedulePolicy, :models do
before do
project.add_maintainer(owner)
project.add_maintainer(user)
- pipeline_schedule.update(owner: owner)
+ pipeline_schedule.update!(owner: owner)
end
it 'includes abilities to take ownership' do
diff --git a/spec/policies/concerns/crud_policy_helpers_spec.rb b/spec/policies/concerns/crud_policy_helpers_spec.rb
new file mode 100644
index 00000000000..69bf9ad12d6
--- /dev/null
+++ b/spec/policies/concerns/crud_policy_helpers_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CrudPolicyHelpers do
+ let(:policy_test_class) do
+ Class.new do
+ include CrudPolicyHelpers
+ end
+ end
+
+ let(:feature_name) { :foo }
+
+ before do
+ stub_const('PolicyTestClass', policy_test_class)
+ end
+
+ describe '.create_read_update_admin_destroy' do
+ it 'returns an array of the appropriate abilities given a feature name' do
+ expect(PolicyTestClass.create_read_update_admin_destroy(feature_name)).to eq([
+ :read_foo,
+ :create_foo,
+ :update_foo,
+ :admin_foo,
+ :destroy_foo
+ ])
+ end
+ end
+
+ describe '.create_update_admin_destroy' do
+ it 'returns an array of the appropriate abilities given a feature name' do
+ expect(PolicyTestClass.create_update_admin_destroy(feature_name)).to eq([
+ :create_foo,
+ :update_foo,
+ :admin_foo,
+ :destroy_foo
+ ])
+ end
+ end
+
+ describe '.create_update_admin' do
+ it 'returns an array of the appropriate abilities given a feature name' do
+ expect(PolicyTestClass.create_update_admin(feature_name)).to eq([
+ :create_foo,
+ :update_foo,
+ :admin_foo
+ ])
+ end
+ end
+end
diff --git a/spec/policies/concerns/readonly_abilities_spec.rb b/spec/policies/concerns/readonly_abilities_spec.rb
new file mode 100644
index 00000000000..864924a091d
--- /dev/null
+++ b/spec/policies/concerns/readonly_abilities_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ReadonlyAbilities do
+ let(:test_class) do
+ Class.new do
+ include ReadonlyAbilities
+ end
+ end
+
+ before do
+ stub_const('TestClass', test_class)
+ end
+
+ describe '.readonly_abilities' do
+ it 'returns an array of abilities to be prevented when readonly' do
+ expect(TestClass.readonly_abilities).to include(*described_class::READONLY_ABILITIES)
+ end
+ end
+
+ describe '.readonly_features' do
+ it 'returns an array of features to be prevented when readonly' do
+ expect(TestClass.readonly_features).to include(*described_class::READONLY_FEATURES)
+ end
+ end
+end
diff --git a/spec/policies/design_management/design_policy_spec.rb b/spec/policies/design_management/design_policy_spec.rb
index 5dde5f896c9..5cf2f376edf 100644
--- a/spec/policies/design_management/design_policy_spec.rb
+++ b/spec/policies/design_management/design_policy_spec.rb
@@ -1,22 +1,32 @@
# frozen_string_literal: true
-require 'spec_helper'
+require "spec_helper"
RSpec.describe DesignManagement::DesignPolicy do
include DesignManagementTestHelpers
- include_context 'ProjectPolicy context'
-
let(:guest_design_abilities) { %i[read_design] }
- let(:developer_design_abilities) do
- %i[create_design destroy_design]
- end
+ let(:developer_design_abilities) { %i[create_design destroy_design move_design] }
let(:design_abilities) { guest_design_abilities + developer_design_abilities }
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:project) { create(:project, :public, namespace: owner.namespace) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:design) { create(:design, issue: issue) }
subject(:design_policy) { described_class.new(current_user, design) }
+ before_all do
+ project.add_guest(guest)
+ project.add_maintainer(maintainer)
+ project.add_developer(developer)
+ project.add_reporter(reporter)
+ end
+
shared_examples_for "design abilities not available" do
context "for owners" do
let(:current_user) { owner }
@@ -71,11 +81,11 @@ RSpec.describe DesignManagement::DesignPolicy do
context "for admins" do
let(:current_user) { admin }
- context 'when admin mode enabled', :enable_admin_mode do
+ context "when admin mode enabled", :enable_admin_mode do
it { is_expected.to be_allowed(*design_abilities) }
end
- context 'when admin mode disabled' do
+ context "when admin mode disabled" do
it { is_expected.to be_allowed(*guest_design_abilities) }
it { is_expected.to be_disallowed(*developer_design_abilities) }
end
@@ -121,8 +131,19 @@ RSpec.describe DesignManagement::DesignPolicy do
it_behaves_like "design abilities available for members"
+ context 'when reorder_designs is not enabled' do
+ before do
+ stub_feature_flags(reorder_designs: false)
+ end
+
+ let(:current_user) { developer }
+
+ it { is_expected.to be_allowed(*(developer_design_abilities - [:move_design])) }
+ it { is_expected.to be_disallowed(:move_design) }
+ end
+
context "for guests in private projects" do
- let(:project) { create(:project, :private) }
+ let_it_be(:project) { create(:project, :private) }
let(:current_user) { guest }
it { is_expected.to be_allowed(*guest_design_abilities) }
@@ -137,7 +158,7 @@ RSpec.describe DesignManagement::DesignPolicy do
end
context "when the issue is confidential" do
- let(:issue) { create(:issue, :confidential, project: project) }
+ let_it_be(:issue) { create(:issue, :confidential, project: project) }
it_behaves_like "design abilities available for members"
@@ -155,26 +176,24 @@ RSpec.describe DesignManagement::DesignPolicy do
end
context "when the issue is locked" do
+ let_it_be(:issue) { create(:issue, :locked, project: project) }
let(:current_user) { owner }
- let(:issue) { create(:issue, :locked, project: project) }
it_behaves_like "read-only design abilities"
end
context "when the issue has moved" do
+ let_it_be(:issue) { create(:issue, project: project, moved_to: create(:issue)) }
let(:current_user) { owner }
- let(:issue) { create(:issue, project: project, moved_to: create(:issue)) }
it_behaves_like "read-only design abilities"
end
context "when the project is archived" do
+ let_it_be(:project) { create(:project, :public, :archived) }
+ let_it_be(:issue) { create(:issue, project: project) }
let(:current_user) { owner }
- before do
- project.update!(archived: true)
- end
-
it_behaves_like "read-only design abilities"
end
end
diff --git a/spec/policies/group_deploy_key_policy_spec.rb b/spec/policies/group_deploy_key_policy_spec.rb
new file mode 100644
index 00000000000..c3903a3fa55
--- /dev/null
+++ b/spec/policies/group_deploy_key_policy_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupDeployKeyPolicy do
+ subject { described_class.new(user, group_deploy_key) }
+
+ let_it_be(:user) { create(:user) }
+
+ describe 'edit a group deploy key' do
+ context 'when the user does not own the group deploy key' do
+ let(:group_deploy_key) { create(:group_deploy_key) }
+
+ it { is_expected.to be_disallowed(:update_group_deploy_key) }
+ end
+
+ context 'when the user owns the group deploy key' do
+ let(:group_deploy_key) { create(:group_deploy_key, user: user) }
+
+ before do
+ user.reload
+ end
+
+ it { is_expected.to be_allowed(:update_group_deploy_key) }
+ end
+ end
+end
diff --git a/spec/policies/group_deploy_keys_group_policy_spec.rb b/spec/policies/group_deploy_keys_group_policy_spec.rb
new file mode 100644
index 00000000000..7ad9b655411
--- /dev/null
+++ b/spec/policies/group_deploy_keys_group_policy_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupDeployKeysGroupPolicy do
+ subject { described_class.new(user, group_deploy_keys_group) }
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_deploy_key) { create(:group_deploy_key) }
+ let(:group_deploy_keys_group) { create(:group_deploy_keys_group, group: group, group_deploy_key: group_deploy_key) }
+
+ describe 'edit a group deploy key for a given group' do
+ it 'is allowed when the user is an owner of this group' do
+ group.add_owner(user)
+
+ expect(subject).to be_allowed(:update_group_deploy_key_for_group)
+ end
+
+ it 'is not allowed when the user is not an owner of this group' do
+ expect(subject).to be_disallowed(:update_group_deploy_key_for_group)
+ end
+ end
+end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 9bd692b45c3..3e0ea164e3d 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe GroupPolicy do
context 'with subgroup_creation level set to maintainer' do
before_all do
- group.update(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
+ group.update!(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
end
it 'allows every maintainer permission plus creating subgroups' do
@@ -409,7 +409,7 @@ RSpec.describe GroupPolicy do
context 'transfer_projects' do
shared_examples_for 'allowed to transfer projects' do
before do
- group.update(project_creation_level: project_creation_level)
+ group.update!(project_creation_level: project_creation_level)
end
it { is_expected.to be_allowed(:transfer_projects) }
@@ -417,7 +417,7 @@ RSpec.describe GroupPolicy do
shared_examples_for 'not allowed to transfer projects' do
before do
- group.update(project_creation_level: project_creation_level)
+ group.update!(project_creation_level: project_creation_level)
end
it { is_expected.to be_disallowed(:transfer_projects) }
@@ -491,7 +491,7 @@ RSpec.describe GroupPolicy do
context 'create_projects' do
context 'when group has no project creation level set' do
before_all do
- group.update(project_creation_level: nil)
+ group.update!(project_creation_level: nil)
end
context 'reporter' do
@@ -521,7 +521,7 @@ RSpec.describe GroupPolicy do
context 'when group has project creation level set to no one' do
before_all do
- group.update(project_creation_level: ::Gitlab::Access::NO_ONE_PROJECT_ACCESS)
+ group.update!(project_creation_level: ::Gitlab::Access::NO_ONE_PROJECT_ACCESS)
end
context 'reporter' do
@@ -551,7 +551,7 @@ RSpec.describe GroupPolicy do
context 'when group has project creation level set to maintainer only' do
before_all do
- group.update(project_creation_level: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
+ group.update!(project_creation_level: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end
context 'reporter' do
@@ -581,7 +581,7 @@ RSpec.describe GroupPolicy do
context 'when group has project creation level set to developers + maintainer' do
before_all do
- group.update(project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
+ group.update!(project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
end
context 'reporter' do
@@ -613,7 +613,7 @@ RSpec.describe GroupPolicy do
context 'create_subgroup' do
context 'when group has subgroup creation level set to owner' do
before_all do
- group.update(subgroup_creation_level: ::Gitlab::Access::OWNER_SUBGROUP_ACCESS)
+ group.update!(subgroup_creation_level: ::Gitlab::Access::OWNER_SUBGROUP_ACCESS)
end
context 'reporter' do
@@ -643,7 +643,7 @@ RSpec.describe GroupPolicy do
context 'when group has subgroup creation level set to maintainer' do
before_all do
- group.update(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
+ group.update!(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
end
context 'reporter' do
@@ -752,7 +752,7 @@ RSpec.describe GroupPolicy do
context 'which does not have design management enabled' do
before do
- project.update(lfs_enabled: false)
+ project.update!(lfs_enabled: false)
end
it { is_expected.not_to be_allowed(:read_design_activity) }
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index b3ca37b17c2..e352b990159 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe IssuePolicy do
end
it 'does not allow issue author to read or update confidential issue moved to a private project' do
- confidential_issue.project = build(:project, :private)
+ confidential_issue.project = create(:project, :private)
expect(permissions(author, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue)
end
@@ -117,7 +117,7 @@ RSpec.describe IssuePolicy do
end
it 'does not allow issue assignees to read or update confidential issue moved to a private project' do
- confidential_issue.project = build(:project, :private)
+ confidential_issue.project = create(:project, :private)
expect(permissions(assignee, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue)
end
@@ -188,7 +188,7 @@ RSpec.describe IssuePolicy do
context 'when issues are private' do
before do
- project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
end
let(:issue) { create(:issue, project: project, author: author) }
let(:visitor) { create(:user) }
diff --git a/spec/policies/merge_request_policy_spec.rb b/spec/policies/merge_request_policy_spec.rb
index 2f3cb2e998a..3a46d5b9226 100644
--- a/spec/policies/merge_request_policy_spec.rb
+++ b/spec/policies/merge_request_policy_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe MergeRequestPolicy do
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: author) }
before do
- project.project_feature.update(merge_requests_access_level: ProjectFeature::DISABLED)
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED)
end
describe 'the author' do
@@ -83,8 +83,8 @@ RSpec.describe MergeRequestPolicy do
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project, author: author) }
before do
- project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
- project.project_feature.update(merge_requests_access_level: ProjectFeature::PRIVATE)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
end
describe 'a non-team-member' do
diff --git a/spec/policies/personal_access_token_policy_spec.rb b/spec/policies/personal_access_token_policy_spec.rb
new file mode 100644
index 00000000000..71795202e13
--- /dev/null
+++ b/spec/policies/personal_access_token_policy_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PersonalAccessTokenPolicy do
+ include AdminModeHelper
+
+ subject { described_class.new(current_user, token) }
+
+ context 'current_user is an administrator', :enable_admin_mode do
+ let_it_be(:current_user) { build(:admin) }
+
+ context 'not the owner of the token' do
+ let_it_be(:token) { build(:personal_access_token) }
+
+ it { is_expected.to be_allowed(:read_token) }
+ it { is_expected.to be_allowed(:revoke_token) }
+ end
+
+ context 'owner of the token' do
+ let_it_be(:token) { build(:personal_access_token, user: current_user) }
+
+ it { is_expected.to be_allowed(:read_token) }
+ it { is_expected.to be_allowed(:revoke_token) }
+ end
+ end
+
+ context 'current_user is not an administrator' do
+ let_it_be(:current_user) { build(:user) }
+
+ context 'not the owner of the token' do
+ let_it_be(:token) { build(:personal_access_token) }
+
+ it { is_expected.to be_disallowed(:read_token) }
+ it { is_expected.to be_disallowed(:revoke_token) }
+ end
+
+ context 'owner of the token' do
+ let_it_be(:token) { build(:personal_access_token, user: current_user) }
+
+ it { is_expected.to be_allowed(:read_token) }
+ it { is_expected.to be_allowed(:revoke_token) }
+ end
+ end
+
+ context 'current_user is a blocked administrator', :enable_admin_mode do
+ let_it_be(:current_user) { build(:admin, :blocked) }
+
+ context 'owner of the token' do
+ let_it_be(:token) { build(:personal_access_token, user: current_user) }
+
+ it { is_expected.to be_disallowed(:read_token) }
+ it { is_expected.to be_disallowed(:revoke_token) }
+ end
+
+ context 'not the owner of the token' do
+ let_it_be(:token) { build(:personal_access_token) }
+
+ it { is_expected.to be_disallowed(:read_token) }
+ it { is_expected.to be_disallowed(:revoke_token) }
+ end
+ end
+end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index dc6ed94309b..9879fc53461 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe ProjectPolicy do
read_project_for_iids read_issue_iid read_label
read_milestone read_snippet read_project_member read_note
create_project create_issue create_note upload_file create_merge_request_in
- award_emoji read_release
+ award_emoji read_release read_issue_link
]
end
@@ -30,7 +30,7 @@ RSpec.describe ProjectPolicy do
admin_issue admin_label admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment
read_merge_request download_wiki_code read_sentry_issue read_metrics_dashboard_annotation
- metrics_dashboard read_confidential_issues
+ metrics_dashboard read_confidential_issues admin_issue_link
]
end
@@ -46,7 +46,7 @@ RSpec.describe ProjectPolicy do
resolve_note create_container_image update_container_image destroy_container_image daily_statistics
create_environment update_environment create_deployment update_deployment create_release update_release
create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation
- read_terraform_state
+ read_terraform_state read_pod_logs
]
end
@@ -105,7 +105,7 @@ RSpec.describe ProjectPolicy do
subject { described_class.new(owner, project) }
before do
- project.project_feature.destroy
+ project.project_feature.destroy!
project.reload
end
@@ -325,6 +325,7 @@ RSpec.describe ProjectPolicy do
allow_collaboration: true
)
end
+
let(:maintainer_abilities) do
%w(create_build create_pipeline)
end
@@ -953,7 +954,12 @@ RSpec.describe ProjectPolicy do
context 'when repository is disabled' do
before do
- project.project_feature.update(repository_access_level: ProjectFeature::DISABLED)
+ project.project_feature.update!(
+ # Disable merge_requests and builds as well, since merge_requests and
+ # builds cannot have higher visibility than repository.
+ merge_requests_access_level: ProjectFeature::DISABLED,
+ builds_access_level: ProjectFeature::DISABLED,
+ repository_access_level: ProjectFeature::DISABLED)
end
it { is_expected.to be_disallowed(:read_package) }
diff --git a/spec/policies/user_policy_spec.rb b/spec/policies/user_policy_spec.rb
index 1cc3581ebdd..d7338622c86 100644
--- a/spec/policies/user_policy_spec.rb
+++ b/spec/policies/user_policy_spec.rb
@@ -12,6 +12,34 @@ RSpec.describe UserPolicy do
it { is_expected.to be_allowed(:read_user) }
end
+ describe "reading a different user's Personal Access Tokens" do
+ let(:token) { create(:personal_access_token, user: user) }
+
+ context 'when user is admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:read_user_personal_access_tokens) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.not_to be_allowed(:read_user_personal_access_tokens) }
+ end
+ end
+
+ context 'when user is not an admin' do
+ context 'requesting their own personal access tokens' do
+ subject { described_class.new(current_user, current_user) }
+
+ it { is_expected.to be_allowed(:read_user_personal_access_tokens) }
+ end
+
+ context "requesting a different user's personal access tokens" do
+ it { is_expected.not_to be_allowed(:read_user_personal_access_tokens) }
+ end
+ end
+ end
+
shared_examples 'changing a user' do |ability|
context "when a regular user tries to destroy another regular user" do
it { is_expected.not_to be_allowed(ability) }
diff --git a/spec/presenters/alert_management/alert_presenter_spec.rb b/spec/presenters/alert_management/alert_presenter_spec.rb
index b1bf7029f3e..394007a802f 100644
--- a/spec/presenters/alert_management/alert_presenter_spec.rb
+++ b/spec/presenters/alert_management/alert_presenter_spec.rb
@@ -4,17 +4,22 @@ require 'spec_helper'
RSpec.describe AlertManagement::AlertPresenter do
let_it_be(:project) { create(:project) }
+
let_it_be(:generic_payload) do
{
'title' => 'Alert title',
'start_time' => '2020-04-27T10:10:22.265949279Z',
- 'custom' => { 'param' => 73 }
+ 'custom' => { 'param' => 73 },
+ 'runbook' => 'https://runbook.com'
}
end
+
let_it_be(:alert) do
create(:alert_management_alert, :with_description, :with_host, :with_service, :with_monitoring_tool, project: project, payload: generic_payload)
end
+ let(:alert_url) { "http://localhost/#{project.full_path}/-/alert_management/#{alert.iid}/details" }
+
subject(:presenter) { described_class.new(alert) }
describe '#issue_description' do
@@ -30,11 +35,13 @@ RSpec.describe AlertManagement::AlertPresenter do
**Service:** #{alert.service}#{markdown_line_break}
**Monitoring tool:** #{alert.monitoring_tool}#{markdown_line_break}
**Hosts:** #{alert.hosts.join(' ')}#{markdown_line_break}
- **Description:** #{alert.description}
+ **Description:** #{alert.description}#{markdown_line_break}
+ **GitLab alert:** #{alert_url}
#### Alert Details
- **custom.param:** 73
+ **custom.param:** 73#{markdown_line_break}
+ **runbook:** https://runbook.com
MARKDOWN
)
end
@@ -45,4 +52,16 @@ RSpec.describe AlertManagement::AlertPresenter do
expect(presenter.metrics_dashboard_url).to be_nil
end
end
+
+ describe '#runbook' do
+ it 'shows the runbook from the payload' do
+ expect(presenter.runbook).to eq('https://runbook.com')
+ end
+ end
+
+ describe '#details_url' do
+ it 'returns the details URL' do
+ expect(presenter.details_url).to match(%r{#{project.web_url}/-/alert_management/#{alert.iid}/details})
+ end
+ end
end
diff --git a/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb b/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb
index 95246914140..3cfff3c1b2f 100644
--- a/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb
+++ b/spec/presenters/alert_management/prometheus_alert_presenter_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe AlertManagement::PrometheusAlertPresenter do
let_it_be(:project) { create(:project) }
- let_it_be(:payload) do
+ let(:payload) do
{
'annotations' => {
'title' => 'Alert title',
@@ -15,10 +15,13 @@ RSpec.describe AlertManagement::PrometheusAlertPresenter do
'generatorURL' => 'http://8d467bd4607a:9090/graph?g0.expr=vector%281%29&g0.tab=1'
}
end
- let(:alert) do
+
+ let!(:alert) do
create(:alert_management_alert, :prometheus, project: project, payload: payload)
end
+ let(:alert_url) { "http://localhost/#{project.full_path}/-/alert_management/#{alert.iid}/details" }
+
subject(:presenter) { described_class.new(alert) }
describe '#issue_description' do
@@ -32,7 +35,8 @@ RSpec.describe AlertManagement::PrometheusAlertPresenter do
**Start time:** #{presenter.start_time}#{markdown_line_break}
**Severity:** #{presenter.severity}#{markdown_line_break}
**full_query:** `vector(1)`#{markdown_line_break}
- **Monitoring tool:** Prometheus
+ **Monitoring tool:** Prometheus#{markdown_line_break}
+ **GitLab alert:** #{alert_url}
#### Alert Details
@@ -65,4 +69,17 @@ RSpec.describe AlertManagement::PrometheusAlertPresenter do
it { is_expected.to eq(dashboard_url_for_alert) }
end
end
+
+ describe '#runbook' do
+ subject { presenter.runbook }
+
+ it { is_expected.to be_nil }
+
+ context 'with runbook in payload' do
+ let(:expected_runbook) { 'https://awesome-runbook.com' }
+ let(:payload) { { 'annotations' => { 'runbook' => expected_runbook } } }
+
+ it { is_expected.to eq(expected_runbook) }
+ end
+ end
end
diff --git a/spec/presenters/blob_presenter_spec.rb b/spec/presenters/blob_presenter_spec.rb
index bf926ce62b3..47402fea2b5 100644
--- a/spec/presenters/blob_presenter_spec.rb
+++ b/spec/presenters/blob_presenter_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe BlobPresenter, :seed_helper do
'files/ruby/regex.rb'
)
end
+
let(:blob) { Blob.new(git_blob) }
describe '.web_url' do
@@ -24,6 +25,16 @@ RSpec.describe BlobPresenter, :seed_helper do
it { expect(subject.web_url).to eq("http://localhost/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
end
+ describe '#web_path' do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository }
+ let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.tree.blobs.first, repository) }
+
+ subject { described_class.new(blob) }
+
+ it { expect(subject.web_path).to eq("/#{project.full_path}/-/blob/#{blob.commit_id}/#{blob.path}") }
+ end
+
describe '#highlight' do
subject { described_class.new(blob) }
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index 5b75b281297..e99b04fda8d 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -265,7 +265,7 @@ RSpec.describe Clusters::ClusterPresenter do
is_expected.to include('clusters-path': clusterable_presenter.index_path,
'dashboard-endpoint': clusterable_presenter.metrics_dashboard_path(cluster),
'documentation-path': help_page_path('user/project/clusters/index', anchor: 'monitoring-your-kubernetes-cluster-ultimate'),
- 'add-dashboard-documentation-path': help_page_path('user/project/integrations/prometheus.md', anchor: 'adding-a-new-dashboard-to-your-project'),
+ 'add-dashboard-documentation-path': help_page_path('operations/metrics/dashboards/index.md', anchor: 'add-a-new-dashboard-to-your-project'),
'empty-getting-started-svg-path': match_asset_path('/assets/illustrations/monitoring/getting_started.svg'),
'empty-loading-svg-path': match_asset_path('/assets/illustrations/monitoring/loading.svg'),
'empty-no-data-svg-path': match_asset_path('/assets/illustrations/monitoring/no_data.svg'),
diff --git a/spec/presenters/commit_presenter_spec.rb b/spec/presenters/commit_presenter_spec.rb
index bc6be07f415..b221c9ca8f7 100644
--- a/spec/presenters/commit_presenter_spec.rb
+++ b/spec/presenters/commit_presenter_spec.rb
@@ -8,6 +8,10 @@ RSpec.describe CommitPresenter do
let(:user) { create(:user) }
let(:presenter) { described_class.new(commit, current_user: user) }
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/commit/#{commit.sha}") }
+ end
+
describe '#status_for' do
subject { presenter.status_for('ref') }
diff --git a/spec/presenters/event_presenter_spec.rb b/spec/presenters/event_presenter_spec.rb
index 2d4872ea29e..6798be21d28 100644
--- a/spec/presenters/event_presenter_spec.rb
+++ b/spec/presenters/event_presenter_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe EventPresenter do
context 'with project label' do
subject { project_event.present.target_link_options }
- it { is_expected.to eq([group.becomes(Namespace), project, target]) }
+ it { is_expected.to eq([project, target]) }
end
end
end
diff --git a/spec/presenters/packages/detail/package_presenter_spec.rb b/spec/presenters/packages/detail/package_presenter_spec.rb
index 34582957364..3a13aca6c7a 100644
--- a/spec/presenters/packages/detail/package_presenter_spec.rb
+++ b/spec/presenters/packages/detail/package_presenter_spec.rb
@@ -9,15 +9,18 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
let(:presenter) { described_class.new(package) }
let_it_be(:user_info) { { name: user.name, avatar_url: user.avatar_url } }
+
let!(:expected_package_files) do
- npm_file = package.package_files.first
- [{
- created_at: npm_file.created_at,
- download_path: npm_file.download_path,
- file_name: npm_file.file_name,
- size: npm_file.size
- }]
+ package.package_files.map do |file|
+ {
+ created_at: file.created_at,
+ download_path: file.download_path,
+ file_name: file.file_name,
+ size: file.size
+ }
+ end
end
+
let(:pipeline_info) do
pipeline = package.build_info.pipeline
{
@@ -29,11 +32,15 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
user: user_info,
project: {
name: pipeline.project.name,
- web_url: pipeline.project.web_url
+ web_url: pipeline.project.web_url,
+ pipeline_url: include("pipelines/#{pipeline.id}"),
+ commit_url: include("commit/#{pipeline.sha}")
}
}
end
+
let!(:dependency_links) { [] }
+
let!(:expected_package_details) do
{
id: package.id,
@@ -55,7 +62,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
let(:expected_package_details) { super().merge(pipeline: pipeline_info) }
it 'returns details with pipeline' do
- expect(presenter.detail_view).to eq expected_package_details
+ expect(presenter.detail_view).to match expected_package_details
end
end
@@ -67,6 +74,24 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
end
end
+ context 'with conan metadata' do
+ let(:package) { create(:conan_package, project: project) }
+ let(:expected_package_details) { super().merge(conan_metadatum: package.conan_metadatum) }
+
+ it 'returns conan_metadatum' do
+ expect(presenter.detail_view).to eq expected_package_details
+ end
+ end
+
+ context 'with composer metadata' do
+ let(:package) { create(:composer_package, :with_metadatum, sha: '123', project: project) }
+ let(:expected_package_details) { super().merge(composer_metadatum: package.composer_metadatum) }
+
+ it 'returns composer_metadatum' do
+ expect(presenter.detail_view).to eq expected_package_details
+ end
+ end
+
context 'with nuget_metadatum' do
let_it_be(:package) { create(:nuget_package, project: project) }
let_it_be(:nuget_metadatum) { create(:nuget_metadatum, package: package) }
@@ -81,6 +106,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
let_it_be(:package) { create(:nuget_package, project: project) }
let_it_be(:dependency_link) { create(:packages_dependency_link, package: package) }
let_it_be(:nuget_dependency) { create(:nuget_dependency_link_metadatum, dependency_link: dependency_link) }
+
let_it_be(:expected_link) do
{
name: dependency_link.dependency.name,
@@ -88,6 +114,7 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
target_framework: nuget_dependency.target_framework
}
end
+
let_it_be(:dependency_links) { [expected_link] }
it 'returns the correct dependency link' do
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index eb1ff628d14..4b4d8ee85db 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -525,7 +525,7 @@ RSpec.describe ProjectPresenter do
end
describe '#statistics_buttons' do
- let(:project) { build(:project) }
+ let(:project) { build_stubbed(:project) }
it 'orders the items correctly' do
allow(project.repository).to receive(:readme).and_return(double(name: 'readme'))
diff --git a/spec/presenters/projects/prometheus/alert_presenter_spec.rb b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
index 89c5438b074..2d58a7f2cfa 100644
--- a/spec/presenters/projects/prometheus/alert_presenter_spec.rb
+++ b/spec/presenters/projects/prometheus/alert_presenter_spec.rb
@@ -293,6 +293,19 @@ RSpec.describe Projects::Prometheus::AlertPresenter do
end
end
+ describe '#details_url' do
+ subject { presenter.details_url }
+
+ it { is_expected.to eq(nil) }
+
+ context 'alert management alert present' do
+ let_it_be(:am_alert) { create(:alert_management_alert, project: project) }
+ let(:alert) { create(:alerting_alert, project: project, payload: payload, am_alert: am_alert) }
+
+ it { is_expected.to eq("http://localhost/#{project.full_path}/-/alert_management/#{am_alert.iid}/details") }
+ end
+ end
+
context 'with gitlab alert' do
include_context 'gitlab alert'
diff --git a/spec/presenters/prometheus_alert_presenter_spec.rb b/spec/presenters/prometheus_alert_presenter_spec.rb
new file mode 100644
index 00000000000..b9f18e2be28
--- /dev/null
+++ b/spec/presenters/prometheus_alert_presenter_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PrometheusAlertPresenter do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+
+ let(:presenter) { described_class.new(prometheus_alert) }
+
+ describe '#humanized_text' do
+ subject { presenter.humanized_text }
+
+ let_it_be(:prometheus_metric) { create(:prometheus_metric, project: project) }
+ let(:prometheus_alert) { create(:prometheus_alert, operator: operator, project: project, environment: environment, prometheus_metric: prometheus_metric) }
+ let(:operator) { :gt }
+
+ it { is_expected.to eq('exceeded 1.0m/s') }
+
+ context 'when operator is eq' do
+ let(:operator) { :eq }
+
+ it { is_expected.to eq('is equal to 1.0m/s') }
+ end
+
+ context 'when operator is lt' do
+ let(:operator) { :lt }
+
+ it { is_expected.to eq('is less than 1.0m/s') }
+ end
+ end
+end
diff --git a/spec/presenters/snippet_blob_presenter_spec.rb b/spec/presenters/snippet_blob_presenter_spec.rb
index 7464c0ac15b..915f43fe572 100644
--- a/spec/presenters/snippet_blob_presenter_spec.rb
+++ b/spec/presenters/snippet_blob_presenter_spec.rb
@@ -13,13 +13,14 @@ RSpec.describe SnippetBlobPresenter do
subject { described_class.new(snippet.blob).rich_data }
context 'with PersonalSnippet' do
- let(:raw_url) { "http://127.0.0.1:3000/snippets/#{snippet.id}/raw" }
- let(:snippet) { build(:personal_snippet) }
+ let(:snippet) { create(:personal_snippet, :repository) }
- it 'returns nil when the snippet blob is binary' do
- allow(snippet.blob).to receive(:binary?).and_return(true)
+ context 'when blob is binary' do
+ it 'returns the HTML associated with the binary' do
+ allow(snippet).to receive(:blob).and_return(snippet.repository.blob_at('master', 'files/images/logo-black.png'))
- expect(subject).to be_nil
+ expect(subject).to include('file-content image_file')
+ end
end
context 'with markdown format' do
@@ -40,7 +41,7 @@ RSpec.describe SnippetBlobPresenter do
let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
it 'returns rich notebook content' do
- expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
+ expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/-/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
end
end
@@ -48,7 +49,7 @@ RSpec.describe SnippetBlobPresenter do
let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
it 'returns rich openapi content' do
- expect(subject).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
+ expect(subject).to eq %Q(<div class="file-content" data-endpoint="/-/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
end
end
@@ -108,7 +109,7 @@ RSpec.describe SnippetBlobPresenter do
end
end
- describe '#raw_path' do
+ describe 'route helpers' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :repository, author: user) }
@@ -118,28 +119,62 @@ RSpec.describe SnippetBlobPresenter do
project.add_developer(user)
end
- subject { described_class.new(snippet.blobs.first, current_user: user).raw_path }
+ describe '#raw_path' do
+ subject { described_class.new(snippet.blobs.first, current_user: user).raw_path }
+
+ it_behaves_like 'snippet blob raw path'
+
+ context 'with snippet_multiple_files feature disabled' do
+ before do
+ stub_feature_flags(snippet_multiple_files: false)
+ end
+
+ context 'with ProjectSnippet' do
+ let(:snippet) { project_snippet }
+
+ it 'returns the raw path' do
+ expect(subject).to eq "/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw"
+ end
+ end
+
+ context 'with PersonalSnippet' do
+ let(:snippet) { personal_snippet }
- it_behaves_like 'snippet blob raw path'
+ it 'returns the raw path' do
+ expect(subject).to eq "/-/snippets/#{snippet.id}/raw"
+ end
+ end
+ end
+ end
+
+ describe '#raw_url' do
+ subject { described_class.new(snippet.blobs.first, current_user: user).raw_url }
- context 'with snippet_multiple_files feature disabled' do
before do
- stub_feature_flags(snippet_multiple_files: false)
+ stub_default_url_options(host: 'test.host')
end
- context 'with ProjectSnippet' do
- let(:snippet) { project_snippet }
+ it_behaves_like 'snippet blob raw url'
- it 'returns the raw path' do
- expect(subject).to eq "/#{snippet.project.full_path}/snippets/#{snippet.id}/raw"
+ context 'with snippet_multiple_files feature disabled' do
+ before do
+ stub_feature_flags(snippet_multiple_files: false)
+ end
+
+ context 'with ProjectSnippet' do
+ let(:snippet) { project_snippet }
+
+ it 'returns the raw project snippet url' do
+ expect(subject).to eq("http://test.host/#{project_snippet.project.full_path}/-/snippets/#{project_snippet.id}/raw")
+ end
end
- end
- context 'with PersonalSnippet' do
- let(:snippet) { personal_snippet }
+ context 'with PersonalSnippet' do
+ let(:snippet) { personal_snippet }
- it 'returns the raw path' do
- expect(subject).to eq "/snippets/#{snippet.id}/raw"
+ it 'returns the raw personal snippet url' do
+ expect(subject).to eq("http://test.host/-/snippets/#{personal_snippet.id}/raw")
+ end
end
end
end
diff --git a/spec/presenters/snippet_presenter_spec.rb b/spec/presenters/snippet_presenter_spec.rb
index 98c291bdd02..681564ed2b0 100644
--- a/spec/presenters/snippet_presenter_spec.rb
+++ b/spec/presenters/snippet_presenter_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe SnippetPresenter do
let(:snippet) { personal_snippet }
it 'returns snippet web url' do
- expect(subject).to match "/snippets/#{snippet.id}"
+ expect(subject).to match "/-/snippets/#{snippet.id}"
end
end
@@ -31,7 +31,7 @@ RSpec.describe SnippetPresenter do
let(:snippet) { project_snippet }
it 'returns snippet web url' do
- expect(subject).to match "/#{project.full_path}/snippets/#{snippet.id}"
+ expect(subject).to match "/#{project.full_path}/-/snippets/#{snippet.id}"
end
end
end
@@ -43,7 +43,7 @@ RSpec.describe SnippetPresenter do
let(:snippet) { personal_snippet }
it 'returns snippet web url' do
- expect(subject).to match "/snippets/#{snippet.id}/raw"
+ expect(subject).to match "/-/snippets/#{snippet.id}/raw"
end
end
@@ -51,7 +51,7 @@ RSpec.describe SnippetPresenter do
let(:snippet) { project_snippet }
it 'returns snippet web url' do
- expect(subject).to match "/#{project.full_path}/snippets/#{snippet.id}/raw"
+ expect(subject).to match "/#{project.full_path}/-/snippets/#{snippet.id}/raw"
end
end
end
diff --git a/spec/presenters/tree_entry_presenter_spec.rb b/spec/presenters/tree_entry_presenter_spec.rb
index d29a7a6ab04..de84f36c5e6 100644
--- a/spec/presenters/tree_entry_presenter_spec.rb
+++ b/spec/presenters/tree_entry_presenter_spec.rb
@@ -13,4 +13,8 @@ RSpec.describe TreeEntryPresenter do
describe '.web_url' do
it { expect(presenter.web_url).to eq("http://localhost/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}") }
end
+
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{project.full_path}/-/tree/#{tree.commit_id}/#{tree.path}") }
+ end
end
diff --git a/spec/presenters/user_presenter_spec.rb b/spec/presenters/user_presenter_spec.rb
new file mode 100644
index 00000000000..fdc20216a02
--- /dev/null
+++ b/spec/presenters/user_presenter_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UserPresenter do
+ let_it_be(:user) { create(:user) }
+ subject(:presenter) { described_class.new(user) }
+
+ describe '#web_path' do
+ it { expect(presenter.web_path).to eq("/#{user.username}") }
+ end
+
+ describe '#web_url' do
+ it { expect(presenter.web_url).to eq("http://localhost/#{user.username}") }
+ end
+end
diff --git a/spec/requests/api/admin/ci/variables_spec.rb b/spec/requests/api/admin/ci/variables_spec.rb
index 812ee93ad21..f89964411f8 100644
--- a/spec/requests/api/admin/ci/variables_spec.rb
+++ b/spec/requests/api/admin/ci/variables_spec.rb
@@ -110,20 +110,19 @@ RSpec.describe ::API::Admin::Ci::Variables do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it 'does not allow values above 700 characters' do
+ it 'does not allow values above 10,000 characters' do
too_long_message = <<~MESSAGE.strip
- The encrypted value of the provided variable exceeds 1024 bytes. \
- Variables over 700 characters risk exceeding the limit.
+ The value of the provided variable exceeds the 10000 character limit
MESSAGE
expect do
post api('/admin/ci/variables', admin),
- params: { key: 'too_long', value: SecureRandom.hex(701) }
+ params: { key: 'too_long', value: SecureRandom.hex(10_001) }
end.not_to change { ::Ci::InstanceVariable.count }
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response).to match('message' =>
- a_hash_including('encrypted_value' => [too_long_message]))
+ a_hash_including('value' => [too_long_message]))
end
end
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 46acd92803f..4b9b82b3a5b 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -39,9 +39,11 @@ RSpec.describe API::Branches do
end
context 'with branch_list_keyset_pagination feature off' do
- context 'with legacy pagination params' do
+ let(:base_params) { {} }
+
+ context 'with offset pagination params' do
it 'returns the repository branches' do
- get api(route, current_user), params: { per_page: 100 }
+ get api(route, current_user), params: base_params.merge(per_page: 100)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('public_api/v4/branches')
@@ -53,7 +55,7 @@ RSpec.describe API::Branches do
it 'determines only a limited number of merged branch names' do
expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
- get api(route, current_user), params: { per_page: 2 }
+ get api(route, current_user), params: base_params.merge(per_page: 2)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq 2
@@ -64,7 +66,7 @@ RSpec.describe API::Branches do
it 'merge status matches reality on paginated input' do
expected_first_branch_name = project.repository.branches_sorted_by('name')[20].name
- get api(route, current_user), params: { per_page: 20, page: 2 }
+ get api(route, current_user), params: base_params.merge(per_page: 20, page: 2)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq 20
@@ -74,11 +76,11 @@ RSpec.describe API::Branches do
end
end
- context 'with gitaly pagination params ' do
+ context 'with gitaly pagination params' do
it 'merge status matches reality on paginated input' do
expected_first_branch_name = project.repository.branches_sorted_by('name').first.name
- get api(route, current_user), params: { per_page: 20, page_token: 'feature' }
+ get api(route, current_user), params: base_params.merge(per_page: 20, page_token: 'feature')
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq 20
@@ -91,52 +93,58 @@ RSpec.describe API::Branches do
context 'with branch_list_keyset_pagination feature on' do
before do
- stub_feature_flags(branch_list_keyset_pagination: true)
+ stub_feature_flags(branch_list_keyset_pagination: project)
end
- context 'with gitaly pagination params ' do
- it 'returns the repository branches' do
- get api(route, current_user), params: { per_page: 100 }
+ context 'with keyset pagination option' do
+ let(:base_params) { { pagination: 'keyset' } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/branches')
- branch_names = json_response.map { |x| x['name'] }
- expect(branch_names).to match_array(project.repository.branch_names)
- end
+ context 'with gitaly pagination params' do
+ it 'returns the repository branches' do
+ get api(route, current_user), params: base_params.merge(per_page: 100)
- it 'determines only a limited number of merged branch names' do
- expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/branches')
+ expect(response.headers).not_to include('Link', 'Links')
+ branch_names = json_response.map { |x| x['name'] }
+ expect(branch_names).to match_array(project.repository.branch_names)
+ end
- get api(route, current_user), params: { per_page: 2 }
+ it 'determines only a limited number of merged branch names' do
+ expect(API::Entities::Branch).to receive(:represent).with(anything, has_up_to_merged_branch_names_count(2)).and_call_original
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq 2
+ get api(route, current_user), params: base_params.merge(per_page: 2)
- check_merge_status(json_response)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers).to include('Link', 'Links')
+ expect(json_response.count).to eq 2
- it 'merge status matches reality on paginated input' do
- expected_first_branch_name = project.repository.branches_sorted_by('name').drop_while { |b| b.name <= 'feature' }.first.name
+ check_merge_status(json_response)
+ end
- get api(route, current_user), params: { per_page: 20, page_token: 'feature' }
+ it 'merge status matches reality on paginated input' do
+ expected_first_branch_name = project.repository.branches_sorted_by('name').drop_while { |b| b.name <= 'feature' }.first.name
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.count).to eq 20
- expect(json_response.first['name']).to eq(expected_first_branch_name)
+ get api(route, current_user), params: base_params.merge(per_page: 20, page_token: 'feature')
- check_merge_status(json_response)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq 20
+ expect(json_response.first['name']).to eq(expected_first_branch_name)
+
+ check_merge_status(json_response)
+ end
end
- end
- context 'with legacy pagination params' do
- it 'ignores legacy pagination params' do
- expected_first_branch_name = project.repository.branches_sorted_by('name').first.name
- get api(route, current_user), params: { per_page: 20, page: 2 }
+ context 'with offset pagination params' do
+ it 'ignores legacy pagination params' do
+ expected_first_branch_name = project.repository.branches_sorted_by('name').first.name
+ get api(route, current_user), params: base_params.merge(per_page: 20, page: 2)
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.first['name']).to eq(expected_first_branch_name)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.first['name']).to eq(expected_first_branch_name)
- check_merge_status(json_response)
+ check_merge_status(json_response)
+ end
end
end
end
diff --git a/spec/requests/api/ci/pipelines_spec.rb b/spec/requests/api/ci/pipelines_spec.rb
index c9ca806e2c4..111bc933ea4 100644
--- a/spec/requests/api/ci/pipelines_spec.rb
+++ b/spec/requests/api/ci/pipelines_spec.rb
@@ -438,7 +438,7 @@ RSpec.describe API::Ci::Pipelines do
expect(response).to match_response_schema('public_api/v4/pipeline/detail')
end
- it 'returns project pipelines' do
+ it 'returns project pipeline' do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}", user)
expect(response).to have_gitlab_http_status(:ok)
@@ -475,6 +475,20 @@ RSpec.describe API::Ci::Pipelines do
expect(json_response['id']).to be nil
end
end
+
+ context 'when config source is not ci' do
+ let(:non_ci_config_source) { ::Ci::PipelineEnums.non_ci_config_source_values.first }
+ let(:pipeline_not_ci) do
+ create(:ci_pipeline, config_source: non_ci_config_source, project: project)
+ end
+
+ it 'returns the specified pipeline' do
+ get api("/projects/#{project.id}/pipelines/#{pipeline_not_ci.id}", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['sha']).to eq(pipeline_not_ci.sha)
+ end
+ end
end
describe 'GET /projects/:id/pipelines/latest' do
@@ -721,55 +735,36 @@ RSpec.describe API::Ci::Pipelines do
let(:pipeline) { create(:ci_pipeline, project: project) }
- context 'when feature is enabled' do
- before do
- stub_feature_flags(junit_pipeline_view: true)
- end
-
- context 'when pipeline does not have a test report' do
- it 'returns an empty test report' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(0)
- end
- end
-
- context 'when pipeline has a test report' do
- let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
-
- it 'returns the test report' do
- subject
+ context 'when pipeline does not have a test report' do
+ it 'returns an empty test report' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['total_count']).to eq(4)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(0)
end
+ end
- context 'when pipeline has corrupt test reports' do
- before do
- job = create(:ci_build, pipeline: pipeline)
- create(:ci_job_artifact, :junit_with_corrupted_data, job: job, project: project)
- end
+ context 'when pipeline has a test report' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
- it 'returns a suite_error' do
- subject
+ it 'returns the test report' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['total_count']).to eq(4)
end
end
- context 'when feature is disabled' do
+ context 'when pipeline has corrupt test reports' do
before do
- stub_feature_flags(junit_pipeline_view: false)
+ create(:ci_build, :broken_test_reports, name: 'rspec', pipeline: pipeline)
end
- it 'renders empty response' do
+ it 'returns a suite_error' do
subject
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['test_suites'].first['suite_error']).to eq('JUnit XML parsing failed: 1:1: FATAL: Document is empty')
end
end
end
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
new file mode 100644
index 00000000000..e5c60bb539b
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -0,0 +1,901 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'artifacts' do
+ let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
+ let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
+ let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
+ let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
+ let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
+ let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
+
+ before do
+ stub_artifacts_object_storage
+ job.run!
+ end
+
+ shared_examples_for 'rejecting artifacts that are too large' do
+ let(:filesize) { 100.megabytes.to_i }
+ let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
+
+ shared_examples_for 'failed request' do
+ it 'responds with payload too large error' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:payload_too_large)
+ end
+ end
+
+ context 'based on plan limit setting' do
+ let(:application_max_size) { sample_max_size + 100 }
+ let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
+
+ before do
+ create(:plan_limits, :default_plan, limit_name => sample_max_size)
+ stub_application_setting(max_artifacts_size: application_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on application setting' do
+ before do
+ stub_application_setting(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on root namespace setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on child namespace setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+ let(:root_namespace_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: root_namespace_max_size)
+ namespace.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+
+ context 'based on project setting' do
+ let(:application_max_size) { sample_max_size + 10 }
+ let(:root_namespace_max_size) { sample_max_size + 10 }
+ let(:child_namespace_max_size) { sample_max_size + 10 }
+
+ before do
+ stub_application_setting(max_artifacts_size: application_max_size)
+ root_namespace.update!(max_artifacts_size: root_namespace_max_size)
+ namespace.update!(max_artifacts_size: child_namespace_max_size)
+ project.update!(max_artifacts_size: sample_max_size)
+ end
+
+ it_behaves_like 'failed request'
+ end
+ end
+
+ describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
+ context 'when using token as parameter' do
+ context 'and the artifact is too large' do
+ it_behaves_like 'rejecting artifacts that are too large' do
+ let(:success_code) { :ok }
+ let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
+ end
+ end
+
+ context 'posting artifacts to running job' do
+ subject do
+ authorize_artifacts_with_token_in_params
+ end
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
+ let(:send_request) { subject }
+ end
+
+ it 'updates runner info' do
+ expect { subject }.to change { runner.reload.contacted_at }
+ end
+
+ shared_examples 'authorizes local file' do
+ it 'succeeds' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
+ expect(json_response['RemoteObject']).to be_nil
+ end
+ end
+
+ context 'when using local storage' do
+ it_behaves_like 'authorizes local file'
+ end
+
+ context 'when using remote storage' do
+ context 'when direct upload is enabled' do
+ before do
+ stub_artifacts_object_storage(enabled: true, direct_upload: true)
+ end
+
+ it 'succeeds' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response).not_to have_key('TempPath')
+ expect(json_response['RemoteObject']).to have_key('ID')
+ expect(json_response['RemoteObject']).to have_key('GetURL')
+ expect(json_response['RemoteObject']).to have_key('StoreURL')
+ expect(json_response['RemoteObject']).to have_key('DeleteURL')
+ expect(json_response['RemoteObject']).to have_key('MultipartUpload')
+ end
+ end
+
+ context 'when direct upload is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: true, direct_upload: false)
+ end
+
+ it_behaves_like 'authorizes local file'
+ end
+ end
+ end
+ end
+
+ context 'when using token as header' do
+ it 'authorizes posting artifacts to running job' do
+ authorize_artifacts_with_token_in_headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
+ expect(json_response['TempPath']).not_to be_nil
+ end
+
+ it 'fails to post an artifact that is too large' do
+ stub_application_setting(max_artifacts_size: 0)
+
+ authorize_artifacts_with_token_in_headers(filesize: 100)
+
+ expect(response).to have_gitlab_http_status(:payload_too_large)
+ end
+ end
+
+ context 'when using runners token' do
+ it 'fails to authorize artifacts posting' do
+ authorize_artifacts(token: job.project.runners_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ it 'rejects requests that did not go through gitlab-workhorse' do
+ headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
+
+ authorize_artifacts
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'authorization token is invalid' do
+ it 'responds with forbidden' do
+ authorize_artifacts(token: 'invalid', filesize: 100)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'authorize uploading of an lsif artifact' do
+ before do
+ stub_feature_flags(code_navigation: job.project)
+ end
+
+ it 'adds ProcessLsif header' do
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['ProcessLsif']).to be_truthy
+ end
+
+ it 'adds ProcessLsifReferences header' do
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['ProcessLsifReferences']).to be_truthy
+ end
+
+ context 'code_navigation feature flag is disabled' do
+ it 'responds with a forbidden error' do
+ stub_feature_flags(code_navigation: false)
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['ProcessLsif']).to be_falsy
+ expect(json_response['ProcessLsifReferences']).to be_falsy
+ end
+ end
+ end
+
+ context 'code_navigation_references feature flag is disabled' do
+ it 'sets ProcessLsifReferences header to false' do
+ stub_feature_flags(code_navigation_references: false)
+ authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
+
+ aggregate_failures do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['ProcessLsif']).to be_truthy
+ expect(json_response['ProcessLsifReferences']).to be_falsy
+ end
+ end
+ end
+ end
+
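+ # Helpers below POST to the artifacts authorize endpoint; the token variants
+ # pass the job token either as a request param or via the JOB_TOKEN_HEADER.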
+ def authorize_artifacts(params = {}, request_headers = headers)
+ post api("/jobs/#{job.id}/artifacts/authorize"), params: params, headers: request_headers
+ end
+
+ def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
+ params = params.merge(token: job.token)
+ authorize_artifacts(params, request_headers)
+ end
+
+ def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
+ authorize_artifacts(params, request_headers)
+ end
+ end
+
+ describe 'POST /api/v4/jobs/:id/artifacts' do
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts' do
+ let(:send_request) do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+ end
+
+ it 'updates runner info' do
+ expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when the artifact is too large' do
+ it_behaves_like 'rejecting artifacts that are too large' do
+ # This filesize validation also happens for files that are not stored
+ # remotely; it is just hard to stub the filesize in those cases to be
+ # more than a megabyte.
+ let!(:fog_connection) do
+ stub_artifacts_object_storage(direct_upload: true)
+ end
+
+ let(:file_upload) { fog_to_uploaded_file(object) }
+ let(:success_code) { :created }
+
+ let(:object) do
+ fog_connection.directories.new(key: 'artifacts').files.create( # rubocop:disable Rails/SaveBang
+ key: 'tmp/uploads/12312300',
+ body: 'content'
+ )
+ end
+
+ let(:send_request) do
+ upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
+ end
+
+ before do
+ allow(object).to receive(:content_length).and_return(filesize)
+ end
+ end
+ end
+
+ context 'when artifacts are being stored inside of tmp path' do
+ before do
+ # by configuring this path we allow a temp file to be passed from any path
+ allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
+ end
+
+ context 'when job has been erased' do
+ let(:job) { create(:ci_build, erased_at: Time.now) }
+
+ before do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+
+ it 'responds with forbidden' do
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when job is running' do
+ shared_examples 'successful artifacts upload' do
+ it 'updates successfully' do
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when using accelerated file post' do
+ context 'for file stored locally' do
+ before do
+ upload_artifacts(file_upload, headers_with_token)
+ end
+
+ it_behaves_like 'successful artifacts upload'
+ end
+
+ context 'for file stored remotely' do
+ let!(:fog_connection) do
+ stub_artifacts_object_storage(direct_upload: true)
+ end
+
+ let(:object) do
+ fog_connection.directories.new(key: 'artifacts').files.create( # rubocop:disable Rails/SaveBang
+ key: 'tmp/uploads/12312300',
+ body: 'content'
+ )
+ end
+
+ let(:file_upload) { fog_to_uploaded_file(object) }
+
+ before do
+ upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => remote_id)
+ end
+
+ context 'when valid remote_id is used' do
+ let(:remote_id) { '12312300' }
+
+ it_behaves_like 'successful artifacts upload'
+ end
+
+ context 'when invalid remote_id is used' do
+ let(:remote_id) { 'invalid id' }
+
+ it 'responds with internal server error' do
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ expect(json_response['message']).to eq("Missing file")
+ end
+ end
+ end
+ end
+
+ context 'when using runners token' do
+ it 'responds with forbidden' do
+ upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when artifacts post request does not contain file' do
+ it 'fails to post artifacts without file' do
+ post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when GitLab Workhorse is not configured' do
+ it 'fails to post artifacts without GitLab-Workhorse' do
+ post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when the GitLab Workhorse token header is invalid' do
+ let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
+
+ it 'fails to post artifacts without a valid GitLab Workhorse token' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).once
+
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when setting an expire date' do
+ let(:default_artifacts_expire_in) {}
+ let(:post_data) do
+ { file: file_upload,
+ expire_in: expire_in }
+ end
+
+ before do
+ stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
+
+ upload_artifacts(file_upload, headers_with_token, post_data)
+ end
+
+ context 'when an expire_in is given' do
+ let(:expire_in) { '7 days' }
+
+ it 'updates when specified' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
+ end
+ end
+
+ context 'when no expire_in is given' do
+ let(:expire_in) { nil }
+
+ it 'ignores if not specified' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_nil
+ end
+
+ context 'with application default' do
+ context 'when default is 5 days' do
+ let(:default_artifacts_expire_in) { '5 days' }
+
+ it 'sets to application default' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
+ end
+ end
+
+ context 'when default is 0' do
+ let(:default_artifacts_expire_in) { '0' }
+
+ it 'does not set expire_in' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_nil
+ end
+ end
+
+ context 'when value is never' do
+ let(:expire_in) { 'never' }
+ let(:default_artifacts_expire_in) { '5 days' }
+
+ it 'does not set expire_in' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.artifacts_expire_at).to be_nil
+ end
+ end
+ end
+ end
+ end
+
+ context 'posts artifacts file and metadata file' do
+ let!(:artifacts) { file_upload }
+ let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
+ let!(:metadata) { file_upload2 }
+ let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
+
+ let(:stored_artifacts_file) { job.reload.artifacts_file }
+ let(:stored_metadata_file) { job.reload.artifacts_metadata }
+ let(:stored_artifacts_size) { job.reload.artifacts_size }
+ let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
+ let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
+ let(:file_keys) { post_data.keys }
+ let(:send_rewritten_field) { true }
+
+ before do
+ workhorse_finalize_with_multiple_files(
+ api("/jobs/#{job.id}/artifacts"),
+ method: :post,
+ file_keys: file_keys,
+ params: post_data,
+ headers: headers_with_token,
+ send_rewritten_field: send_rewritten_field
+ )
+ end
+
+ context 'when posts data accelerated by workhorse is correct' do
+ let(:post_data) { { file: artifacts, metadata: metadata } }
+
+ it 'stores artifacts and artifacts metadata' do
+ expect(response).to have_gitlab_http_status(:created)
+ expect(stored_artifacts_file.filename).to eq(artifacts.original_filename)
+ expect(stored_metadata_file.filename).to eq(metadata.original_filename)
+ expect(stored_artifacts_size).to eq(artifacts.size)
+ expect(stored_artifacts_sha256).to eq(artifacts_sha256)
+ expect(stored_metadata_sha256).to eq(metadata_sha256)
+ end
+ end
+
+ context 'with a malicious file.path param' do
+ let(:post_data) { {} }
+ let(:tmp_file) { Tempfile.new('crafted.file.path') }
+ let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
+
+ it 'rejects the request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(stored_artifacts_size).to be_nil
+ end
+ end
+
+ context 'when workhorse header is missing' do
+ let(:post_data) { { file: artifacts, metadata: metadata } }
+ let(:send_rewritten_field) { false }
+
+ it 'rejects the request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(stored_artifacts_size).to be_nil
+ end
+ end
+
+ context 'when there is no artifacts file in post data' do
+ let(:post_data) do
+ { metadata: metadata }
+ end
+
+ it 'responds with bad request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'does not store metadata' do
+ expect(stored_metadata_file).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is archive' do
+ context 'when artifact_format is zip' do
+ let(:params) { { artifact_type: :archive, artifact_format: :zip } }
+
+ it 'stores the archive artifact' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is gzip' do
+ let(:params) { { artifact_type: :archive, artifact_format: :gzip } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_archive).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is junit' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
+ let(:params) { { artifact_type: :junit, artifact_format: :gzip } }
+
+ it 'stores junit test report' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_junit).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
+ let(:params) { { artifact_type: :junit, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_junit).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is metrics_referee' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
+ let(:params) { { artifact_type: :metrics_referee, artifact_format: :gzip } }
+
+ it 'stores metrics_referee data' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
+ let(:params) { { artifact_type: :metrics_referee, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_metrics_referee).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is network_referee' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
+ let(:params) { { artifact_type: :network_referee, artifact_format: :gzip } }
+
+ it 'stores network_referee data' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_network_referee).not_to be_nil
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
+ let(:params) { { artifact_type: :network_referee, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_network_referee).to be_nil
+ end
+ end
+ end
+
+ context 'when artifact_type is dotenv' do
+ context 'when artifact_format is gzip' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
+ let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }
+
+ it 'stores dotenv file' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_dotenv).not_to be_nil
+ end
+
+ it 'parses dotenv file' do
+ expect do
+ upload_artifacts(file_upload, headers_with_token, params)
+ end.to change { job.job_variables.count }.from(0).to(2)
+ end
+
+ context 'when parse error happens' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('Invalid Format')
+ end
+ end
+ end
+
+ context 'when artifact_format is raw' do
+ let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
+ let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }
+
+ it 'returns an error' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_dotenv).to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when artifacts already exist for the job' do
+ let(:params) do
+ {
+ artifact_type: :archive,
+ artifact_format: :zip,
+ 'file.sha256' => uploaded_sha256
+ }
+ end
+
+ let(:existing_sha256) { '0' * 64 }
+
+ let!(:existing_artifact) do
+ create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
+ end
+
+ context 'when sha256 is the same of the existing artifact' do
+ let(:uploaded_sha256) { existing_sha256 }
+
+ it 'ignores the new artifact' do
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
+ end
+ end
+
+ context 'when sha256 is different than the existing artifact' do
+ let(:uploaded_sha256) { '1' * 64 }
+
+ it 'logs and returns an error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
+ end
+ end
+ end
+
+ context 'when object storage throws errors' do
+ let(:params) { { artifact_type: :archive, artifact_format: :zip } }
+
+ it 'does not store artifacts' do
+ allow_next_instance_of(JobArtifactUploader) do |uploader|
+ allow(uploader).to receive(:store!).and_raise(Errno::EIO)
+ end
+
+ upload_artifacts(file_upload, headers_with_token, params)
+
+ expect(response).to have_gitlab_http_status(:service_unavailable)
+ expect(job.reload.job_artifacts_archive).to be_nil
+ end
+ end
+
+ context 'when artifacts are being stored outside of tmp path' do
+ let(:new_tmpdir) { Dir.mktmpdir }
+
+ before do
+ # initialize the upload fixture before overwriting the tmp dir
+ file_upload
+
+ # by configuring this path we only allow passing files from the stubbed tmp dir,
+ # but all temporary files are stored in the system tmp directory
+ allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
+ end
+
+ after do
+ FileUtils.remove_entry(new_tmpdir)
+ end
+
+ it 'fails to post artifacts stored outside of the tmp path' do
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
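+ # Finalizes a Workhorse-style multipart upload of the given file to the
+ # job artifacts endpoint.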
+ def upload_artifacts(file, headers = {}, params = {})
+ workhorse_finalize(
+ api("/jobs/#{job.id}/artifacts"),
+ method: :post,
+ file_key: :file,
+ params: params.merge(file: file),
+ headers: headers,
+ send_rewritten_field: true
+ )
+ end
+ end
+
+ describe 'GET /api/v4/jobs/:id/artifacts' do
+ let(:token) { job.token }
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts' do
+ let(:send_request) { download_artifact }
+ end
+
+ it 'updates runner info' do
+ expect { download_artifact }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when job has artifacts' do
+ let(:job) { create(:ci_build) }
+ let(:store) { JobArtifactUploader::Store::LOCAL }
+
+ before do
+ create(:ci_job_artifact, :archive, file_store: store, job: job)
+ end
+
+ context 'when using job token' do
+ context 'when artifacts are stored locally' do
+ let(:download_headers) do
+ { 'Content-Transfer-Encoding' => 'binary',
+ 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) }
+ end
+
+ before do
+ download_artifact
+ end
+
+ it 'downloads artifacts' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers.to_h).to include download_headers
+ end
+ end
+
+ context 'when artifacts are stored remotely' do
+ let(:store) { JobArtifactUploader::Store::REMOTE }
+ let!(:job) { create(:ci_build) }
+
+ context 'when proxy download is being used' do
+ before do
+ download_artifact(direct_download: false)
+ end
+
+ it 'uses workhorse send-url' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers.to_h).to include(
+ 'Gitlab-Workhorse-Send-Data' => /send-url:/)
+ end
+ end
+
+ context 'when direct download is being used' do
+ before do
+ download_artifact(direct_download: true)
+ end
+
+ it 'receives a redirect for downloading artifacts' do
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response.headers).to include('Location')
+ end
+ end
+ end
+ end
+
+ context 'when using runners token' do
+ let(:token) { job.project.runners_token }
+
+ before do
+ download_artifact
+ end
+
+ it 'responds with forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when job does not have artifacts' do
+ it 'responds with not found' do
+ download_artifact
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
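+ # Downloads the job artifacts, merging the current token into the params.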
+ def download_artifact(params = {}, request_headers = headers)
+ params = params.merge(token: token)
+ job.reload
+
+ get api("/jobs/#{job.id}/artifacts"), params: params, headers: request_headers
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
new file mode 100644
index 00000000000..025747f2f0c
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -0,0 +1,196 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'PUT /api/v4/jobs/:id' do
+ let(:job) do
+ create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
+ end
+
+ before do
+ job.run!
+ end
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id' do
+ let(:send_request) { update_job(state: 'success') }
+ end
+
+ it 'updates runner info' do
+ expect { update_job(state: 'success') }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when status is given' do
+ it 'marks job as succeeded' do
+ update_job(state: 'success')
+
+ job.reload
+ expect(job).to be_success
+ end
+
+ it 'marks job as failed' do
+ update_job(state: 'failed')
+
+ job.reload
+ expect(job).to be_failed
+ expect(job).to be_unknown_failure
+ end
+
+ context 'when failure_reason is script_failure' do
+ before do
+ update_job(state: 'failed', failure_reason: 'script_failure')
+ job.reload
+ end
+
+ it { expect(job).to be_script_failure }
+ end
+
+ context 'when failure_reason is runner_system_failure' do
+ before do
+ update_job(state: 'failed', failure_reason: 'runner_system_failure')
+ job.reload
+ end
+
+ it { expect(job).to be_runner_system_failure }
+ end
+
+ context 'when failure_reason is unrecognized value' do
+ before do
+ update_job(state: 'failed', failure_reason: 'what_is_this')
+ job.reload
+ end
+
+ it { expect(job).to be_unknown_failure }
+ end
+
+ context 'when failure_reason is job_execution_timeout' do
+ before do
+ update_job(state: 'failed', failure_reason: 'job_execution_timeout')
+ job.reload
+ end
+
+ it { expect(job).to be_job_execution_timeout }
+ end
+
+ context 'when failure_reason is unmet_prerequisites' do
+ before do
+ update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
+ job.reload
+ end
+
+ it { expect(job).to be_unmet_prerequisites }
+ end
+ end
+
+ context 'when trace is given' do
+ it 'creates a trace artifact' do
+ allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do
+ ArchiveTraceWorker.new.perform(job.id)
+ end
+
+ update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
+
+ job.reload
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
+ expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
+ end
+
+ context 'when concurrent update of trace is happening' do
+ before do
+ job.trace.write('wb') do
+ update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
+ end
+ end
+
+ it 'responds with a conflict' do
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
+ end
+
+ context 'when no trace is given' do
+ it 'does not override trace information' do
+ update_job
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE'
+ end
+
+ context 'when running state is sent' do
+ it 'updates updated_at value' do
+ expect { update_job_after_time }.to change { job.reload.updated_at }
+ end
+ end
+
+ context 'when other state is sent' do
+ it "doesn't update update_at value" do
+ expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
+ end
+ end
+ end
+
+ context 'when job has been erased' do
+ let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
+
+ it 'responds with forbidden' do
+ update_job
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when job has already been finished' do
+ before do
+ job.trace.set('Job failed')
+ job.drop!(:script_failure)
+ end
+
+ it 'does not update job status and job trace' do
+ update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
+
+ job.reload
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response.header['Job-Status']).to eq 'failed'
+ expect(job.trace.raw).to eq 'Job failed'
+ expect(job).to be_failed
+ end
+ end
+
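+ # Sends a PUT to the job endpoint with the given params and the job token.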
+ def update_job(token = job.token, **params)
+ new_params = params.merge(token: token)
+ put api("/jobs/#{job.id}"), params: new_params
+ end
+
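+ # Re-issues the update after travelling forward in time to exercise the
+ # updated_at bumping behaviour.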
+ def update_job_after_time(update_interval = 20.minutes, state: 'running')
+ Timecop.travel(job.updated_at + update_interval) do
+ update_job(job.token, state: state)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
new file mode 100644
index 00000000000..4fa95f8ebb2
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -0,0 +1,861 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'POST /api/v4/jobs/request' do
+ let!(:last_update) {}
+ let!(:new_update) { }
+ let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
+
+ before do
+ job
+ stub_container_registry_config(enabled: false)
+ end
+
+ shared_examples 'no jobs available' do
+ before do
+ request_job
+ end
+
+ context 'when runner sends version in User-Agent' do
+ context 'for stable version' do
+ it 'gives 204 and sets X-GitLab-Last-Update' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header).to have_key('X-GitLab-Last-Update')
+ end
+ end
+
+ context 'when last_update is up-to-date' do
+ let(:last_update) { runner.ensure_runner_queue_value }
+
+ it 'gives 204 and sets the same X-GitLab-Last-Update' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
+ end
+ end
+
+ context 'when last_update is outdated' do
+ let(:last_update) { runner.ensure_runner_queue_value }
+ let(:new_update) { runner.tick_runner_queue }
+
+ it 'gives 204 and sets a new X-GitLab-Last-Update' do
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
+ end
+ end
+
+ context 'when beta version is sent' do
+ let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
+
+ it { expect(response).to have_gitlab_http_status(:no_content) }
+ end
+
+ context 'when pre-9-0 version is sent' do
+ let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
+
+ it { expect(response).to have_gitlab_http_status(:no_content) }
+ end
+
+ context 'when pre-9-0 beta version is sent' do
+ let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
+
+ it { expect(response).to have_gitlab_http_status(:no_content) }
+ end
+ end
+ end
+
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ post api('/jobs/request')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ post api('/jobs/request'), params: { token: 'invalid' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ context 'when Runner is not active' do
+ let(:runner) { create(:ci_runner, :inactive) }
+ let(:update_value) { runner.ensure_runner_queue_value }
+
+ it 'returns 204 and sets X-GitLab-Last-Update' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
+ end
+ end
+
+ context 'when jobs are finished' do
+ before do
+ job.success
+ end
+
+ it_behaves_like 'no jobs available'
+ end
+
+ context 'when other projects have pending jobs' do
+ before do
+ job.success
+ create(:ci_build, :pending)
+ end
+
+ it_behaves_like 'no jobs available'
+ end
+
+ context 'when shared runner requests job for project without shared_runners_enabled' do
+ let(:runner) { create(:ci_runner, :instance) }
+
+ it_behaves_like 'no jobs available'
+ end
+
+ context 'when there is a pending job' do
+ let(:expected_job_info) do
+ { 'name' => job.name,
+ 'stage' => job.stage,
+ 'project_id' => job.project.id,
+ 'project_name' => job.project.name }
+ end
+
+ let(:expected_git_info) do
+ { 'repo_url' => job.repo_url,
+ 'ref' => job.ref,
+ 'sha' => job.sha,
+ 'before_sha' => job.before_sha,
+ 'ref_type' => 'branch',
+ 'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ "+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
+ 'depth' => project.ci_default_git_depth }
+ end
+
+ let(:expected_steps) do
+ [{ 'name' => 'script',
+ 'script' => %w(echo),
+ 'timeout' => job.metadata_timeout,
+ 'when' => 'on_success',
+ 'allow_failure' => false },
+ { 'name' => 'after_script',
+ 'script' => %w(ls date),
+ 'timeout' => job.metadata_timeout,
+ 'when' => 'always',
+ 'allow_failure' => true }]
+ end
+
+ let(:expected_variables) do
+ [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
+ { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }]
+ end
+
+ let(:expected_artifacts) do
+ [{ 'name' => 'artifacts_file',
+ 'untracked' => false,
+ 'paths' => %w(out/),
+ 'when' => 'always',
+ 'expire_in' => '7d',
+ "artifact_type" => "archive",
+ "artifact_format" => "zip" }]
+ end
+
+ let(:expected_cache) do
+ [{ 'key' => 'cache_key',
+ 'untracked' => false,
+ 'paths' => ['vendor/*'],
+ 'policy' => 'pull-push' }]
+ end
+
+ let(:expected_features) { { 'trace_sections' => true } }
+
+ it 'picks a job' do
+ request_job info: { platform: :darwin }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.headers['Content-Type']).to eq('application/json')
+ expect(response.headers).not_to have_key('X-GitLab-Last-Update')
+ expect(runner.reload.platform).to eq('darwin')
+ expect(json_response['id']).to eq(job.id)
+ expect(json_response['token']).to eq(job.token)
+ expect(json_response['job_info']).to eq(expected_job_info)
+ expect(json_response['git_info']).to eq(expected_git_info)
+ expect(json_response['image']).to eq({ 'name' => 'ruby:2.7', 'entrypoint' => '/bin/sh', 'ports' => [] })
+ expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
+ 'alias' => nil, 'command' => nil, 'ports' => [] },
+ { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
+ 'alias' => 'docker', 'command' => 'sleep 30', 'ports' => [] }])
+ expect(json_response['steps']).to eq(expected_steps)
+ expect(json_response['artifacts']).to eq(expected_artifacts)
+ expect(json_response['cache']).to eq(expected_cache)
+ expect(json_response['variables']).to include(*expected_variables)
+ expect(json_response['features']).to eq(expected_features)
+ end
+
+ it 'creates persistent ref' do
+ expect_any_instance_of(::Ci::PersistentRef).to receive(:create_ref)
+ .with(job.sha, "refs/#{Repository::REF_PIPELINES}/#{job.commit_id}")
+
+ request_job info: { platform: :darwin }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(job.id)
+ end
+
+ context 'when job is made for tag' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+
+ it 'sets tag as ref_type' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['ref_type']).to eq('tag')
+ end
+
+ context 'when GIT_DEPTH is specified' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}")
+ end
+ end
+
+ context 'when a Gitaly exception is thrown during response' do
+ before do
+ allow_next_instance_of(Ci::BuildRunnerPresenter) do |instance|
+ allow(instance).to receive(:artifacts).and_raise(GRPC::DeadlineExceeded)
+ end
+ end
+
+ it 'fails the job as a scheduler failure' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(job.reload.failed?).to be_truthy
+ expect(job.failure_reason).to eq('scheduler_failure')
+ expect(job.runner_id).to eq(runner.id)
+ expect(job.runner_session).to be_nil
+ end
+ end
+
+ context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
+ before do
+ project.update!(ci_default_git_depth: nil)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs'])
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/tags/*:refs/tags/*',
+ '+refs/heads/*:refs/remotes/origin/*')
+ end
+ end
+ end
+
+ context 'when job filtered by job_age' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago) }
+
+ context 'when the job has been queued for less than the job_age parameter' do
+ let(:job_age) { 120 }
+
+ it 'gives 204' do
+ request_job(job_age: job_age)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
+ context 'when the job has been queued for longer than the job_age parameter' do
+ let(:job_age) { 30 }
+
+ it 'picks a job' do
+ request_job(job_age: job_age)
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+ end
+
+ context 'when job is made for branch' do
+ it 'sets branch as ref_type' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['ref_type']).to eq('branch')
+ end
+
+ context 'when GIT_DEPTH is specified' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}")
+ end
+ end
+
+ context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
+ before do
+ project.update!(ci_default_git_depth: nil)
+ end
+
+ it 'specifies refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['refspecs'])
+ .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
+ '+refs/tags/*:refs/tags/*',
+ '+refs/heads/*:refs/remotes/origin/*')
+ end
+ end
+ end
+
+ context 'when job is for a release' do
+ let!(:job) { create(:ci_build, :release_options, pipeline: pipeline) }
+
+ context 'when `multi_build_steps` is passed by the runner' do
+ it 'exposes release info' do
+ request_job info: { features: { multi_build_steps: true } }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(response.headers).not_to have_key('X-GitLab-Last-Update')
+ expect(json_response['steps']).to eq([
+ {
+ "name" => "script",
+ "script" => ["make changelog | tee release_changelog.txt"],
+ "timeout" => 3600,
+ "when" => "on_success",
+ "allow_failure" => false
+ },
+ {
+ "name" => "release",
+ "script" =>
+ ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""],
+ "timeout" => 3600,
+ "when" => "on_success",
+ "allow_failure" => false
+ }
+ ])
+ end
+ end
+
+ context 'when `multi_build_steps` is not passed by the runner' do
+ it 'drops the job' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+ end
+
+ context 'when job is made for merge request' do
+ let(:pipeline) { create(:ci_pipeline, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
+ let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
+ let(:merge_request) { create(:merge_request) }
+
+ it 'sets branch as ref_type' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['ref_type']).to eq('branch')
+ end
+
+ context 'when GIT_DEPTH is specified' do
+ before do
+ create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
+ end
+
+ it 'returns the overwritten git depth for merge request refspecs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['git_info']['depth']).to eq(1)
+ end
+ end
+ end
+
+ it 'updates runner info' do
+ expect { request_job }.to change { runner.reload.contacted_at }
+ end
+
+ %w(version revision platform architecture).each do |param|
+ context "when info parameter '#{param}' is present" do
+ let(:value) { "#{param}_value" }
+
+ it "updates provided Runner's parameter" do
+ request_job info: { param => value }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
+ end
+ end
+ end
+
+ it "sets the runner's ip_address" do
+ post api('/jobs/request'),
+ params: { token: runner.token },
+ headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(runner.reload.ip_address).to eq('123.222.123.222')
+ end
+
+ it "handles multiple X-Forwarded-For addresses" do
+ post api('/jobs/request'),
+ params: { token: runner.token },
+ headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(runner.reload.ip_address).to eq('123.222.123.222')
+ end
+
+ context 'when concurrently updating a job' do
+ before do
+ expect_any_instance_of(::Ci::Build).to receive(:run!)
+ .and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
+ end
+
+ it 'returns a conflict' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ expect(response.headers).not_to have_key('X-GitLab-Last-Update')
+ end
+ end
+
+ context 'when project and pipeline have multiple jobs' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns dependent jobs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(test_job.id)
+ expect(json_response['dependencies'].count).to eq(2)
+ expect(json_response['dependencies']).to include(
+ { 'id' => job.id, 'name' => job.name, 'token' => job.token },
+ { 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
+ end
+ end
+
+ context 'when pipeline has jobs with artifacts' do
+ let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
+
+ before do
+ job.success
+ end
+
+ it 'returns dependent jobs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(test_job.id)
+ expect(json_response['dependencies'].count).to eq(1)
+ expect(json_response['dependencies']).to include(
+ { 'id' => job.id, 'name' => job.name, 'token' => job.token,
+ 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
+ end
+ end
+
+ context 'when explicit dependencies are defined' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:test_job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
+ stage: 'deploy', stage_idx: 1,
+ options: { script: ['bash'], dependencies: [job2.name] })
+ end
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns dependent jobs' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(test_job.id)
+ expect(json_response['dependencies'].count).to eq(1)
+ expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
+ end
+ end
+
+ context 'when dependencies is an empty array' do
+ let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
+ let!(:empty_dependencies_job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
+ stage: 'deploy', stage_idx: 1,
+ options: { script: ['bash'], dependencies: [] })
+ end
+
+ before do
+ job.success
+ job2.success
+ end
+
+ it 'returns an empty array' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(empty_dependencies_job.id)
+ expect(json_response['dependencies'].count).to eq(0)
+ end
+ end
+
+ context 'when job has no tags' do
+ before do
+ job.update!(tags: [])
+ end
+
+ context 'when runner is allowed to pick untagged jobs' do
+ before do
+ runner.update_column(:run_untagged, true)
+ end
+
+ it 'picks job' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+ end
+
+ context 'when runner is not allowed to pick untagged jobs' do
+ before do
+ runner.update_column(:run_untagged, false)
+ end
+
+ it_behaves_like 'no jobs available'
+ end
+ end
+
+ context 'when triggered job is available' do
+ let(:expected_variables) do
+ [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
+ { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false },
+ { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false },
+ { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false },
+ { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }]
+ end
+
+ let(:trigger) { create(:ci_trigger, project: project) }
+ let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) }
+
+ before do
+ project.variables << ::Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
+ end
+
+ shared_examples 'expected variables behavior' do
+ it 'returns variables for triggers' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['variables']).to include(*expected_variables)
+ end
+ end
+
+ context 'when variables are stored in trigger_request' do
+ before do
+ trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' })
+ end
+
+ it_behaves_like 'expected variables behavior'
+ end
+
+ context 'when variables are stored in pipeline_variables' do
+ before do
+ create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
+ end
+
+ it_behaves_like 'expected variables behavior'
+ end
+ end
+
+ describe 'registry credentials support' do
+ let(:registry_url) { 'registry.example.com:5005' }
+ let(:registry_credentials) do
+ { 'type' => 'registry',
+ 'url' => registry_url,
+ 'username' => 'gitlab-ci-token',
+ 'password' => job.token }
+ end
+
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true, host_port: registry_url)
+ end
+
+ it 'sends registry credentials key' do
+ request_job
+
+ expect(json_response).to have_key('credentials')
+ expect(json_response['credentials']).to include(registry_credentials)
+ end
+ end
+
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false, host_port: registry_url)
+ end
+
+ it 'does not send registry credentials' do
+ request_job
+
+ expect(json_response).to have_key('credentials')
+ expect(json_response['credentials']).not_to include(registry_credentials)
+ end
+ end
+ end
+
+ describe 'timeout support' do
+ context 'when project specifies job timeout' do
+ let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }
+
+ it 'contains info about timeout taken from project' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
+ end
+
+ context 'when runner specifies lower timeout' do
+ let(:runner) { create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) }
+
+ it 'contains info about timeout overridden by runner' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
+ end
+ end
+
+ context 'when runner specifies bigger timeout' do
+ let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) }
+
+ it 'contains info about timeout not overridden by runner' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
+ end
+ end
+ end
+ end
+ end
+
+ describe 'port support' do
+ let(:job) { create(:ci_build, pipeline: pipeline, options: options) }
+
+ context 'when job image has ports' do
+ let(:options) do
+ {
+ image: {
+ name: 'ruby',
+ ports: [80]
+ },
+ services: ['mysql']
+ }
+ end
+
+ it 'returns the image ports' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => a_hash_including('name' => 'ruby', 'ports' => [{ 'number' => 80, 'protocol' => 'http', 'name' => 'default_port' }]),
+ 'services' => all(a_hash_including('name' => 'mysql')))
+ end
+ end
+
+ context 'when job services settings has ports' do
+ let(:options) do
+ {
+ image: 'ruby',
+ services: [
+ {
+ name: 'tomcat',
+ ports: [{ number: 8081, protocol: 'http', name: 'custom_port' }]
+ }
+ ]
+ }
+ end
+
+ it 'returns the service ports' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ 'id' => job.id,
+ 'image' => a_hash_including('name' => 'ruby'),
+ 'services' => all(a_hash_including('name' => 'tomcat', 'ports' => [{ 'number' => 8081, 'protocol' => 'http', 'name' => 'custom_port' }])))
+ end
+ end
+ end
+
+ describe 'a job with excluded artifacts' do
+ context 'when excluded paths are defined' do
+ let(:job) do
+ create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'test',
+ stage: 'deploy', stage_idx: 1,
+ options: { artifacts: { paths: ['abc'], exclude: ['cde'] } })
+ end
+
+ context 'when a runner supports this feature' do
+ it 'exposes excluded paths when the feature is enabled' do
+ stub_feature_flags(ci_artifacts_exclude: true)
+
+ request_job info: { features: { artifacts_exclude: true } }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.dig('artifacts').first).to include('exclude' => ['cde'])
+ end
+
+ it 'does not expose excluded paths when the feature is disabled' do
+ stub_feature_flags(ci_artifacts_exclude: false)
+
+ request_job info: { features: { artifacts_exclude: true } }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.dig('artifacts').first).not_to have_key('exclude')
+ end
+ end
+
+ context 'when a runner does not support this feature' do
+ it 'does not expose the build at all' do
+ stub_feature_flags(ci_artifacts_exclude: true)
+
+ request_job
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+ end
+
+ it 'does not expose excluded paths when these are empty' do
+ request_job
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response.dig('artifacts').first).not_to have_key('exclude')
+ end
+ end
+
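+ # Requests a job as a runner would: runner token and last_update in the
+ # params, JSON body, and the configured User-Agent header.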
+ def request_job(token = runner.token, **params)
+ new_params = params.merge(token: token, last_update: last_update)
+ post api('/jobs/request'), params: new_params.to_json, headers: { 'User-Agent' => user_agent, 'Content-Type': 'application/json' }
+ end
+ end
+
+ context 'for web-ide job' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:service) { ::Ci::CreateWebIdeTerminalService.new(project, user, ref: 'master').execute }
+ let(:pipeline) { service[:pipeline] }
+ let(:build) { pipeline.builds.first }
+ let(:job) { {} }
+ let(:config_content) do
+ 'terminal: { image: ruby, services: [mysql], before_script: [ls], tags: [tag-1], variables: { KEY: value } }'
+ end
+
+ before do
+ stub_webide_config_file(config_content)
+ project.add_maintainer(user)
+
+ pipeline
+ end
+
+ context 'when runner has matching tag' do
+ before do
+ runner.update!(tag_list: ['tag-1'])
+ end
+
+ it 'successfully picks job' do
+ request_job
+
+ build.reload
+
+ expect(build).to be_running
+ expect(build.runner).to eq(runner)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response).to include(
+ "id" => build.id,
+ "variables" => include("key" => 'KEY', "value" => 'value', "public" => true, "masked" => false),
+ "image" => a_hash_including("name" => 'ruby'),
+ "services" => all(a_hash_including("name" => 'mysql')),
+ "job_info" => a_hash_including("name" => 'terminal', "stage" => 'terminal'))
+ end
+ end
+
+ context 'when runner does not have matching tags' do
+ it 'does not pick a job' do
+ request_job
+
+ build.reload
+
+ expect(build).to be_pending
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
+
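+ # Minimal job request helper for the web IDE terminal examples.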
+ def request_job(token = runner.token, **params)
+ post api('/jobs/request'), params: params.merge(token: token)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
new file mode 100644
index 00000000000..1980c1a9f51
--- /dev/null
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -0,0 +1,292 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/jobs' do
+ let(:root_namespace) { create(:namespace) }
+ let(:namespace) { create(:namespace, parent: root_namespace) }
+ let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
+ let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
+ let(:runner) { create(:ci_runner, :project, projects: [project]) }
+ let(:user) { create(:user) }
+ let(:job) do
+ create(:ci_build, :artifacts, :extended_options,
+ pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
+ end
+
+ describe 'PATCH /api/v4/jobs/:id/trace' do
+ let(:job) do
+ create(:ci_build, :running, :trace_live,
+ project: project, user: user, runner_id: runner.id, pipeline: pipeline)
+ end
+
+ let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
+ let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
+ let(:update_interval) { 10.seconds.to_i }
+
+ before do
+ initial_patch_the_trace
+ end
+
+ it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/trace' do
+ let(:send_request) { patch_the_trace }
+ end
+
+ it 'updates runner info' do
+ runner.update!(contacted_at: 1.year.ago)
+
+ expect { patch_the_trace }.to change { runner.reload.contacted_at }
+ end
+
+ context 'when request is valid' do
+ it 'gets correct response' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ expect(response.header).to have_key 'Range'
+ expect(response.header).to have_key 'Job-Status'
+ expect(response.header).to have_key 'X-GitLab-Trace-Update-Interval'
+ end
+
+ context 'when job has been updated recently' do
+ it { expect { patch_the_trace }.not_to change { job.updated_at }}
+
+ it "changes the job's trace" do
+ patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
+ end
+
+ context 'when Runner makes a force-patch' do
+ it { expect { force_patch_the_trace }.not_to change { job.updated_at }}
+
+ it "doesn't change the build.trace" do
+ force_patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ end
+ end
+ end
+
+ context 'when job was not updated recently' do
+ let(:update_interval) { 15.minutes.to_i }
+
+ it { expect { patch_the_trace }.to change { job.updated_at } }
+
+ it 'changes the job.trace' do
+ patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
+ end
+
+ context 'when Runner makes a force-patch' do
+ it { expect { force_patch_the_trace }.to change { job.updated_at } }
+
+ it "doesn't change the job.trace" do
+ force_patch_the_trace
+
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ end
+ end
+ end
+
+ context 'when project for the build has been deleted' do
+ let(:job) do
+ create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
+ job.project.update!(pending_delete: true)
+ end
+ end
+
+ it 'responds with forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when trace is patched' do
+ before do
+ patch_the_trace
+ end
+
+ it 'has valid trace' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
+ end
+
+ context 'when job is cancelled' do
+ before do
+ job.cancel
+ end
+
+ context 'when trace is patched' do
+ before do
+ patch_the_trace
+ end
+
+ it 'returns Forbidden' do
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ context 'when redis data are flushed' do
+ before do
+ redis_shared_state_cleanup!
+ end
+
+ it 'has empty trace' do
+ expect(job.reload.trace.raw).to eq ''
+ end
+
+ context 'when we perform partial patch' do
+ before do
+ patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
+ end
+
+ it 'returns an error' do
+ expect(response).to have_gitlab_http_status(:range_not_satisfiable)
+ expect(response.header['Range']).to eq('0-0')
+ end
+ end
+
+ context 'when we resend full trace' do
+ before do
+ patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
+ end
+
+ it 'succeeds with updating trace' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
+ end
+ end
+ end
+ end
+
+ context 'when concurrent update of trace is happening' do
+ before do
+ job.trace.write('wb') do
+ patch_the_trace
+ end
+ end
+
+ it 'returns that operation conflicts' do
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
+
+ context 'when the job is canceled' do
+ before do
+ job.cancel
+ patch_the_trace
+ end
+
+ it 'receives status in header' do
+ expect(response.header['Job-Status']).to eq 'canceled'
+ end
+ end
+
+ context 'when build trace is being watched' do
+ before do
+ job.trace.being_watched!
+ end
+
+ it 'returns X-GitLab-Trace-Update-Interval as 3' do
+ patch_the_trace
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('3')
+ end
+ end
+
+ context 'when build trace is not being watched' do
+ it 'returns X-GitLab-Trace-Update-Interval as 30' do
+ patch_the_trace
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
+ end
+ end
+ end
+
+ context 'when Runner makes a force-patch' do
+ before do
+ force_patch_the_trace
+ end
+
+ it 'gets correct response' do
+ expect(response).to have_gitlab_http_status(:accepted)
+ expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
+ expect(response.header).to have_key 'Range'
+ expect(response.header).to have_key 'Job-Status'
+ end
+ end
+
+ context 'when content-range start is too big' do
+ let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }
+
+ it 'gets 416 error response with range headers' do
+ expect(response).to have_gitlab_http_status(:range_not_satisfiable)
+ expect(response.header).to have_key 'Range'
+ expect(response.header['Range']).to eq '0-11'
+ end
+ end
+
+ context 'when content-range start is too small' do
+ let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) }
+
+ it 'gets 416 error response with range headers' do
+ expect(response).to have_gitlab_http_status(:range_not_satisfiable)
+ expect(response.header).to have_key 'Range'
+ expect(response.header['Range']).to eq '0-11'
+ end
+ end
+
+ context 'when Content-Range header is missing' do
+ let(:headers_with_range) { headers }
+
+ it { expect(response).to have_gitlab_http_status(:bad_request) }
+ end
+
+ context 'when job has been erased' do
+ let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
+
+ it { expect(response).to have_gitlab_http_status(:forbidden) }
+ end
+
+ def patch_the_trace(content = ' appended', request_headers = nil)
+ unless request_headers
+ job.trace.read do |stream|
+ offset = stream.size
+ limit = offset + content.length - 1
+ request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
+ end
+ end
+
+ Timecop.travel(job.updated_at + update_interval) do
+ patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
+ job.reload
+ end
+ end
+
+ def initial_patch_the_trace
+ patch_the_trace(' appended', headers_with_range)
+ end
+
+ def force_patch_the_trace
+ 2.times { patch_the_trace('') }
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/runners_delete_spec.rb b/spec/requests/api/ci/runner/runners_delete_spec.rb
new file mode 100644
index 00000000000..75960a1a1c0
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_delete_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/runners' do
+ describe 'DELETE /api/v4/runners' do
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ delete api('/runners')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ delete api('/runners'), params: { token: 'invalid' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ let(:runner) { create(:ci_runner) }
+
+ it 'deletes Runner' do
+ delete api('/runners'), params: { token: runner.token }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(::Ci::Runner.count).to eq(0)
+ end
+
+ it_behaves_like '412 response' do
+ let(:request) { api('/runners') }
+ let(:params) { { token: runner.token } }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
new file mode 100644
index 00000000000..7c362fae7d2
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -0,0 +1,250 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/runners' do
+ describe 'POST /api/v4/runners' do
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ post api('/runners')
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ post api('/runners'), params: { token: 'invalid' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ it 'creates runner with default values' do
+ post api('/runners'), params: { token: registration_token }
+
+ runner = ::Ci::Runner.first
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['id']).to eq(runner.id)
+ expect(json_response['token']).to eq(runner.token)
+ expect(runner.run_untagged).to be true
+ expect(runner.active).to be true
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner).to be_instance_type
+ end
+
+ context 'when project token is used' do
+ let(:project) { create(:project) }
+
+ it 'creates project runner' do
+ post api('/runners'), params: { token: project.runners_token }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(project.runners.size).to eq(1)
+ runner = ::Ci::Runner.first
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner.token).not_to eq(project.runners_token)
+ expect(runner).to be_project_type
+ end
+ end
+
+ context 'when group token is used' do
+ let(:group) { create(:group) }
+
+ it 'creates a group runner' do
+ post api('/runners'), params: { token: group.runners_token }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(group.runners.reload.size).to eq(1)
+ runner = ::Ci::Runner.first
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner.token).not_to eq(group.runners_token)
+ expect(runner).to be_group_type
+ end
+ end
+ end
+
+ context 'when runner description is provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ description: 'server.hostname'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.description).to eq('server.hostname')
+ end
+ end
+
+ context 'when runner tags are provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ tag_list: 'tag1, tag2'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
+ end
+ end
+
+ context 'when option for running untagged jobs is provided' do
+ context 'when tags are provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ run_untagged: false,
+ tag_list: ['tag']
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.run_untagged).to be false
+ expect(::Ci::Runner.first.tag_list.sort).to eq(['tag'])
+ end
+ end
+
+ context 'when tags are not provided' do
+ it 'returns 400 error' do
+ post api('/runners'), params: {
+ token: registration_token,
+ run_untagged: false
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include(
+ 'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
+ end
+ end
+ end
+
+ context 'when option for locking Runner is provided' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ locked: true
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.locked).to be true
+ end
+ end
+
+ context 'when option for activating a Runner is provided' do
+ context 'when active is set to true' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ active: true
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.active).to be true
+ end
+ end
+
+ context 'when active is set to false' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ active: false
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.active).to be false
+ end
+ end
+ end
+
+ context 'when access_level is provided for Runner' do
+ context 'when access_level is set to ref_protected' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ access_level: 'ref_protected'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.ref_protected?).to be true
+ end
+ end
+
+ context 'when access_level is set to not_protected' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ access_level: 'not_protected'
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.ref_protected?).to be false
+ end
+ end
+ end
+
+ context 'when maximum job timeout is specified' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ maximum_timeout: 9000
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.maximum_timeout).to eq(9000)
+ end
+
+ context 'when maximum job timeout is empty' do
+ it 'creates runner' do
+ post api('/runners'), params: {
+ token: registration_token,
+ maximum_timeout: ''
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.maximum_timeout).to be_nil
+ end
+ end
+ end
+
+ %w(name version revision platform architecture).each do |param|
+ context "when info parameter '#{param}' info is present" do
+ let(:value) { "#{param}_value" }
+
+ it "updates provided Runner's parameter" do
+ post api('/runners'), params: {
+ token: registration_token,
+ info: { param => value }
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
+ end
+ end
+ end
+
+ it "sets the runner's ip_address" do
+ post api('/runners'),
+ params: { token: registration_token },
+ headers: { 'X-Forwarded-For' => '123.111.123.111' }
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(::Ci::Runner.first.ip_address).to eq('123.111.123.111')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner/runners_verify_post_spec.rb b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
new file mode 100644
index 00000000000..e2f5f9b2d68
--- /dev/null
+++ b/spec/requests/api/ci/runner/runners_verify_post_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
+ include StubGitlabCalls
+ include RedisHelpers
+ include WorkhorseHelpers
+
+ let(:registration_token) { 'abcdefg123456' }
+
+ before do
+ stub_feature_flags(ci_enable_live_trace: true)
+ stub_gitlab_calls
+ stub_application_setting(runners_registration_token: registration_token)
+ allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
+ end
+
+ describe '/api/v4/runners' do
+ describe 'POST /api/v4/runners/verify' do
+ let(:runner) { create(:ci_runner) }
+
+ context 'when no token is provided' do
+ it 'returns 400 error' do
+ post api('/runners/verify')
+
+ expect(response).to have_gitlab_http_status :bad_request
+ end
+ end
+
+ context 'when invalid token is provided' do
+ it 'returns 403 error' do
+ post api('/runners/verify'), params: { token: 'invalid-token' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'when valid token is provided' do
+ it 'verifies Runner credentials' do
+ post api('/runners/verify'), params: { token: runner.token }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/ci/runner_spec.rb b/spec/requests/api/ci/runner_spec.rb
deleted file mode 100644
index c8718309bf2..00000000000
--- a/spec/requests/api/ci/runner_spec.rb
+++ /dev/null
@@ -1,2474 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
- include StubGitlabCalls
- include RedisHelpers
- include WorkhorseHelpers
-
- let(:registration_token) { 'abcdefg123456' }
-
- before do
- stub_feature_flags(ci_enable_live_trace: true)
- stub_gitlab_calls
- stub_application_setting(runners_registration_token: registration_token)
- allow_any_instance_of(::Ci::Runner).to receive(:cache_attributes)
- end
-
- describe '/api/v4/runners' do
- describe 'POST /api/v4/runners' do
- context 'when no token is provided' do
- it 'returns 400 error' do
- post api('/runners')
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- post api('/runners'), params: { token: 'invalid' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- it 'creates runner with default values' do
- post api('/runners'), params: { token: registration_token }
-
- runner = ::Ci::Runner.first
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(runner.id)
- expect(json_response['token']).to eq(runner.token)
- expect(runner.run_untagged).to be true
- expect(runner.active).to be true
- expect(runner.token).not_to eq(registration_token)
- expect(runner).to be_instance_type
- end
-
- context 'when project token is used' do
- let(:project) { create(:project) }
-
- it 'creates project runner' do
- post api('/runners'), params: { token: project.runners_token }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(project.runners.size).to eq(1)
- runner = ::Ci::Runner.first
- expect(runner.token).not_to eq(registration_token)
- expect(runner.token).not_to eq(project.runners_token)
- expect(runner).to be_project_type
- end
- end
-
- context 'when group token is used' do
- let(:group) { create(:group) }
-
- it 'creates a group runner' do
- post api('/runners'), params: { token: group.runners_token }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(group.runners.reload.size).to eq(1)
- runner = ::Ci::Runner.first
- expect(runner.token).not_to eq(registration_token)
- expect(runner.token).not_to eq(group.runners_token)
- expect(runner).to be_group_type
- end
- end
- end
-
- context 'when runner description is provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- description: 'server.hostname'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.description).to eq('server.hostname')
- end
- end
-
- context 'when runner tags are provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- tag_list: 'tag1, tag2'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
- end
- end
-
- context 'when option for running untagged jobs is provided' do
- context 'when tags are provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- run_untagged: false,
- tag_list: ['tag']
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.run_untagged).to be false
- expect(::Ci::Runner.first.tag_list.sort).to eq(['tag'])
- end
- end
-
- context 'when tags are not provided' do
- it 'returns 400 error' do
- post api('/runners'), params: {
- token: registration_token,
- run_untagged: false
- }
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to include(
- 'tags_list' => ['can not be empty when runner is not allowed to pick untagged jobs'])
- end
- end
- end
-
- context 'when option for locking Runner is provided' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- locked: true
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.locked).to be true
- end
- end
-
- context 'when option for activating a Runner is provided' do
- context 'when active is set to true' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- active: true
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.active).to be true
- end
- end
-
- context 'when active is set to false' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- active: false
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.active).to be false
- end
- end
- end
-
- context 'when access_level is provided for Runner' do
- context 'when access_level is set to ref_protected' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- access_level: 'ref_protected'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.ref_protected?).to be true
- end
- end
-
- context 'when access_level is set to not_protected' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- access_level: 'not_protected'
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.ref_protected?).to be false
- end
- end
- end
-
- context 'when maximum job timeout is specified' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- maximum_timeout: 9000
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.maximum_timeout).to eq(9000)
- end
-
- context 'when maximum job timeout is empty' do
- it 'creates runner' do
- post api('/runners'), params: {
- token: registration_token,
- maximum_timeout: ''
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.maximum_timeout).to be_nil
- end
- end
- end
-
- %w(name version revision platform architecture).each do |param|
- context "when info parameter '#{param}' info is present" do
- let(:value) { "#{param}_value" }
-
- it "updates provided Runner's parameter" do
- post api('/runners'), params: {
- token: registration_token,
- info: { param => value }
- }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
- end
- end
- end
-
- it "sets the runner's ip_address" do
- post api('/runners'),
- params: { token: registration_token },
- headers: { 'X-Forwarded-For' => '123.111.123.111' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(::Ci::Runner.first.ip_address).to eq('123.111.123.111')
- end
- end
-
- describe 'DELETE /api/v4/runners' do
- context 'when no token is provided' do
- it 'returns 400 error' do
- delete api('/runners')
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- delete api('/runners'), params: { token: 'invalid' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- let(:runner) { create(:ci_runner) }
-
- it 'deletes Runner' do
- delete api('/runners'), params: { token: runner.token }
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(::Ci::Runner.count).to eq(0)
- end
-
- it_behaves_like '412 response' do
- let(:request) { api('/runners') }
- let(:params) { { token: runner.token } }
- end
- end
- end
-
- describe 'POST /api/v4/runners/verify' do
- let(:runner) { create(:ci_runner) }
-
- context 'when no token is provided' do
- it 'returns 400 error' do
- post api('/runners/verify')
-
- expect(response).to have_gitlab_http_status :bad_request
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- post api('/runners/verify'), params: { token: 'invalid-token' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- it 'verifies Runner credentials' do
- post api('/runners/verify'), params: { token: runner.token }
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
- end
-
- describe '/api/v4/jobs' do
- shared_examples 'application context metadata' do |api_route|
- it 'contains correct context metadata' do
- # Avoids popping the context from the thread so we can
- # check its content after the request.
- allow(Labkit::Context).to receive(:pop)
-
- send_request
-
- Labkit::Context.with_context do |context|
- expected_context = {
- 'meta.caller_id' => api_route,
- 'meta.user' => job.user.username,
- 'meta.project' => job.project.full_path,
- 'meta.root_namespace' => job.project.full_path_components.first
- }
-
- expect(context.to_h).to include(expected_context)
- end
- end
- end
-
- let(:root_namespace) { create(:namespace) }
- let(:namespace) { create(:namespace, parent: root_namespace) }
- let(:project) { create(:project, namespace: namespace, shared_runners_enabled: false) }
- let(:pipeline) { create(:ci_pipeline, project: project, ref: 'master') }
- let(:runner) { create(:ci_runner, :project, projects: [project]) }
- let(:user) { create(:user) }
- let(:job) do
- create(:ci_build, :artifacts, :extended_options,
- pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0)
- end
-
- describe 'POST /api/v4/jobs/request' do
- let!(:last_update) {}
- let!(:new_update) { }
- let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
-
- before do
- job
- stub_container_registry_config(enabled: false)
- end
-
- shared_examples 'no jobs available' do
- before do
- request_job
- end
-
- context 'when runner sends version in User-Agent' do
- context 'for stable version' do
- it 'gives 204 and set X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header).to have_key('X-GitLab-Last-Update')
- end
- end
-
- context 'when last_update is up-to-date' do
- let(:last_update) { runner.ensure_runner_queue_value }
-
- it 'gives 204 and set the same X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
- end
- end
-
- context 'when last_update is outdated' do
- let(:last_update) { runner.ensure_runner_queue_value }
- let(:new_update) { runner.tick_runner_queue }
-
- it 'gives 204 and set a new X-GitLab-Last-Update' do
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
- end
- end
-
- context 'when beta version is sent' do
- let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
-
- it { expect(response).to have_gitlab_http_status(:no_content) }
- end
-
- context 'when pre-9-0 version is sent' do
- let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
-
- it { expect(response).to have_gitlab_http_status(:no_content) }
- end
-
- context 'when pre-9-0 beta version is sent' do
- let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
-
- it { expect(response).to have_gitlab_http_status(:no_content) }
- end
- end
- end
-
- context 'when no token is provided' do
- it 'returns 400 error' do
- post api('/jobs/request')
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when invalid token is provided' do
- it 'returns 403 error' do
- post api('/jobs/request'), params: { token: 'invalid' }
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when valid token is provided' do
- context 'when Runner is not active' do
- let(:runner) { create(:ci_runner, :inactive) }
- let(:update_value) { runner.ensure_runner_queue_value }
-
- it 'returns 204 error' do
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(response.header['X-GitLab-Last-Update']).to eq(update_value)
- end
- end
-
- context 'when jobs are finished' do
- before do
- job.success
- end
-
- it_behaves_like 'no jobs available'
- end
-
- context 'when other projects have pending jobs' do
- before do
- job.success
- create(:ci_build, :pending)
- end
-
- it_behaves_like 'no jobs available'
- end
-
- context 'when shared runner requests job for project without shared_runners_enabled' do
- let(:runner) { create(:ci_runner, :instance) }
-
- it_behaves_like 'no jobs available'
- end
-
- context 'when there is a pending job' do
- let(:expected_job_info) do
- { 'name' => job.name,
- 'stage' => job.stage,
- 'project_id' => job.project.id,
- 'project_name' => job.project.name }
- end
-
- let(:expected_git_info) do
- { 'repo_url' => job.repo_url,
- 'ref' => job.ref,
- 'sha' => job.sha,
- 'before_sha' => job.before_sha,
- 'ref_type' => 'branch',
- 'refspecs' => ["+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- "+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}"],
- 'depth' => project.ci_default_git_depth }
- end
-
- let(:expected_steps) do
- [{ 'name' => 'script',
- 'script' => %w(echo),
- 'timeout' => job.metadata_timeout,
- 'when' => 'on_success',
- 'allow_failure' => false },
- { 'name' => 'after_script',
- 'script' => %w(ls date),
- 'timeout' => job.metadata_timeout,
- 'when' => 'always',
- 'allow_failure' => true }]
- end
-
- let(:expected_variables) do
- [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
- { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
- { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false }]
- end
-
- let(:expected_artifacts) do
- [{ 'name' => 'artifacts_file',
- 'untracked' => false,
- 'paths' => %w(out/),
- 'when' => 'always',
- 'expire_in' => '7d',
- "artifact_type" => "archive",
- "artifact_format" => "zip" }]
- end
-
- let(:expected_cache) do
- [{ 'key' => 'cache_key',
- 'untracked' => false,
- 'paths' => ['vendor/*'],
- 'policy' => 'pull-push' }]
- end
-
- let(:expected_features) { { 'trace_sections' => true } }
-
- it 'picks a job' do
- request_job info: { platform: :darwin }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response.headers['Content-Type']).to eq('application/json')
- expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- expect(runner.reload.platform).to eq('darwin')
- expect(json_response['id']).to eq(job.id)
- expect(json_response['token']).to eq(job.token)
- expect(json_response['job_info']).to eq(expected_job_info)
- expect(json_response['git_info']).to eq(expected_git_info)
- expect(json_response['image']).to eq({ 'name' => 'ruby:2.7', 'entrypoint' => '/bin/sh', 'ports' => [] })
- expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
- 'alias' => nil, 'command' => nil, 'ports' => [] },
- { 'name' => 'docker:stable-dind', 'entrypoint' => '/bin/sh',
- 'alias' => 'docker', 'command' => 'sleep 30', 'ports' => [] }])
- expect(json_response['steps']).to eq(expected_steps)
- expect(json_response['artifacts']).to eq(expected_artifacts)
- expect(json_response['cache']).to eq(expected_cache)
- expect(json_response['variables']).to include(*expected_variables)
- expect(json_response['features']).to eq(expected_features)
- end
-
- it 'creates persistent ref' do
- expect_any_instance_of(::Ci::PersistentRef).to receive(:create_ref)
- .with(job.sha, "refs/#{Repository::REF_PIPELINES}/#{job.commit_id}")
-
- request_job info: { platform: :darwin }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(job.id)
- end
-
- context 'when job is made for tag' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
-
- it 'sets tag as ref_type' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['ref_type']).to eq('tag')
- end
-
- context 'when GIT_DEPTH is specified' do
- before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs']).to include("+refs/tags/#{job.ref}:refs/tags/#{job.ref}")
- end
- end
-
- context 'when a Gitaly exception is thrown during response' do
- before do
- allow_next_instance_of(Ci::BuildRunnerPresenter) do |instance|
- allow(instance).to receive(:artifacts).and_raise(GRPC::DeadlineExceeded)
- end
- end
-
- it 'fails the job as a scheduler failure' do
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- expect(job.reload.failed?).to be_truthy
- expect(job.failure_reason).to eq('scheduler_failure')
- expect(job.runner_id).to eq(runner.id)
- expect(job.runner_session).to be_nil
- end
- end
-
- context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
- before do
- project.update!(ci_default_git_depth: nil)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- '+refs/tags/*:refs/tags/*',
- '+refs/heads/*:refs/remotes/origin/*')
- end
- end
- end
-
- context 'when job filtered by job_age' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, queued_at: 60.seconds.ago) }
-
- context 'job is queued less than job_age parameter' do
- let(:job_age) { 120 }
-
- it 'gives 204' do
- request_job(job_age: job_age)
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- context 'job is queued more than job_age parameter' do
- let(:job_age) { 30 }
-
- it 'picks a job' do
- request_job(job_age: job_age)
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
- end
-
- context 'when job is made for branch' do
- it 'sets branch as ref_type' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['ref_type']).to eq('branch')
- end
-
- context 'when GIT_DEPTH is specified' do
- before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs']).to include("+refs/heads/#{job.ref}:refs/remotes/origin/#{job.ref}")
- end
- end
-
- context 'when GIT_DEPTH is not specified and there is no default git depth for the project' do
- before do
- project.update!(ci_default_git_depth: nil)
- end
-
- it 'specifies refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['refspecs'])
- .to contain_exactly("+refs/pipelines/#{pipeline.id}:refs/pipelines/#{pipeline.id}",
- '+refs/tags/*:refs/tags/*',
- '+refs/heads/*:refs/remotes/origin/*')
- end
- end
- end
-
- context 'when job is for a release' do
- let!(:job) { create(:ci_build, :release_options, pipeline: pipeline) }
-
- context 'when `multi_build_steps` is passed by the runner' do
- it 'exposes release info' do
- request_job info: { features: { multi_build_steps: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- expect(json_response['steps']).to eq([
- {
- "name" => "script",
- "script" => ["make changelog | tee release_changelog.txt"],
- "timeout" => 3600,
- "when" => "on_success",
- "allow_failure" => false
- },
- {
- "name" => "release",
- "script" =>
- ["release-cli create --name \"Release $CI_COMMIT_SHA\" --description \"Created using the release-cli $EXTRA_DESCRIPTION\" --tag-name \"release-$CI_COMMIT_SHA\" --ref \"$CI_COMMIT_SHA\""],
- "timeout" => 3600,
- "when" => "on_success",
- "allow_failure" => false
- }
- ])
- end
- end
-
- context 'when `multi_build_steps` is not passed by the runner' do
- it 'drops the job' do
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
- end
-
- context 'when job is made for merge request' do
- let(:pipeline) { create(:ci_pipeline, source: :merge_request_event, project: project, ref: 'feature', merge_request: merge_request) }
- let!(:job) { create(:ci_build, pipeline: pipeline, name: 'spinach', ref: 'feature', stage: 'test', stage_idx: 0) }
- let(:merge_request) { create(:merge_request) }
-
- it 'sets branch as ref_type' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['ref_type']).to eq('branch')
- end
-
- context 'when GIT_DEPTH is specified' do
- before do
- create(:ci_pipeline_variable, key: 'GIT_DEPTH', value: 1, pipeline: pipeline)
- end
-
- it 'returns the overwritten git depth for merge request refspecs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['git_info']['depth']).to eq(1)
- end
- end
- end
-
- it 'updates runner info' do
- expect { request_job }.to change { runner.reload.contacted_at }
- end
-
- %w(version revision platform architecture).each do |param|
- context "when info parameter '#{param}' is present" do
- let(:value) { "#{param}_value" }
-
- it "updates provided Runner's parameter" do
- request_job info: { param => value }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
- end
- end
- end
-
- it "sets the runner's ip_address" do
- post api('/jobs/request'),
- params: { token: runner.token },
- headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(runner.reload.ip_address).to eq('123.222.123.222')
- end
-
- it "handles multiple X-Forwarded-For addresses" do
- post api('/jobs/request'),
- params: { token: runner.token },
- headers: { 'User-Agent' => user_agent, 'X-Forwarded-For' => '123.222.123.222, 127.0.0.1' }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(runner.reload.ip_address).to eq('123.222.123.222')
- end
-
- context 'when concurrently updating a job' do
- before do
- expect_any_instance_of(::Ci::Build).to receive(:run!)
- .and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
- end
-
- it 'returns a conflict' do
- request_job
-
- expect(response).to have_gitlab_http_status(:conflict)
- expect(response.headers).not_to have_key('X-GitLab-Last-Update')
- end
- end
-
- context 'when project and pipeline have multiple jobs' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
- let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
-
- before do
- job.success
- job2.success
- end
-
- it 'returns dependent jobs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(test_job.id)
- expect(json_response['dependencies'].count).to eq(2)
- expect(json_response['dependencies']).to include(
- { 'id' => job.id, 'name' => job.name, 'token' => job.token },
- { 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
- end
- end
-
- context 'when pipeline have jobs with artifacts' do
- let!(:job) { create(:ci_build, :tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
-
- before do
- job.success
- end
-
- it 'returns dependent jobs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(test_job.id)
- expect(json_response['dependencies'].count).to eq(1)
- expect(json_response['dependencies']).to include(
- { 'id' => job.id, 'name' => job.name, 'token' => job.token,
- 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
- end
- end
-
- context 'when explicit dependencies are defined' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
- let!(:test_job) do
- create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
- stage: 'deploy', stage_idx: 1,
- options: { script: ['bash'], dependencies: [job2.name] })
- end
-
- before do
- job.success
- job2.success
- end
-
- it 'returns dependent jobs' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(test_job.id)
- expect(json_response['dependencies'].count).to eq(1)
- expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
- end
- end
-
- context 'when dependencies is an empty array' do
- let!(:job) { create(:ci_build, :tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
- let!(:job2) { create(:ci_build, :tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
- let!(:empty_dependencies_job) do
- create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
- stage: 'deploy', stage_idx: 1,
- options: { script: ['bash'], dependencies: [] })
- end
-
- before do
- job.success
- job2.success
- end
-
- it 'returns an empty array' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['id']).to eq(empty_dependencies_job.id)
- expect(json_response['dependencies'].count).to eq(0)
- end
- end
-
- context 'when job has no tags' do
- before do
- job.update(tags: [])
- end
-
- context 'when runner is allowed to pick untagged jobs' do
- before do
- runner.update_column(:run_untagged, true)
- end
-
- it 'picks job' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- end
- end
-
- context 'when runner is not allowed to pick untagged jobs' do
- before do
- runner.update_column(:run_untagged, false)
- end
-
- it_behaves_like 'no jobs available'
- end
- end
-
- context 'when triggered job is available' do
- let(:expected_variables) do
- [{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true, 'masked' => false },
- { 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true, 'masked' => false },
- { 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true, 'masked' => false },
- { 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true, 'masked' => false },
- { 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false, 'masked' => false },
- { 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false, 'masked' => false }]
- end
-
- let(:trigger) { create(:ci_trigger, project: project) }
- let!(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, builds: [job], trigger: trigger) }
-
- before do
- project.variables << ::Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
- end
-
- shared_examples 'expected variables behavior' do
- it 'returns variables for triggers' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['variables']).to include(*expected_variables)
- end
- end
-
- context 'when variables are stored in trigger_request' do
- before do
- trigger_request.update_attribute(:variables, { TRIGGER_KEY_1: 'TRIGGER_VALUE_1' } )
- end
-
- it_behaves_like 'expected variables behavior'
- end
-
- context 'when variables are stored in pipeline_variables' do
- before do
- create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
- end
-
- it_behaves_like 'expected variables behavior'
- end
- end
-
- describe 'registry credentials support' do
- let(:registry_url) { 'registry.example.com:5005' }
- let(:registry_credentials) do
- { 'type' => 'registry',
- 'url' => registry_url,
- 'username' => 'gitlab-ci-token',
- 'password' => job.token }
- end
-
- context 'when registry is enabled' do
- before do
- stub_container_registry_config(enabled: true, host_port: registry_url)
- end
-
- it 'sends registry credentials key' do
- request_job
-
- expect(json_response).to have_key('credentials')
- expect(json_response['credentials']).to include(registry_credentials)
- end
- end
-
- context 'when registry is disabled' do
- before do
- stub_container_registry_config(enabled: false, host_port: registry_url)
- end
-
- it 'does not send registry credentials' do
- request_job
-
- expect(json_response).to have_key('credentials')
- expect(json_response['credentials']).not_to include(registry_credentials)
- end
- end
- end
-
- describe 'timeout support' do
- context 'when project specifies job timeout' do
- let(:project) { create(:project, shared_runners_enabled: false, build_timeout: 1234) }
-
- it 'contains info about timeout taken from project' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
- end
-
- context 'when runner specifies lower timeout' do
- let(:runner) { create(:ci_runner, :project, maximum_timeout: 1000, projects: [project]) }
-
- it 'contains info about timeout overridden by runner' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['runner_info']).to include({ 'timeout' => 1000 })
- end
- end
-
- context 'when runner specifies bigger timeout' do
- let(:runner) { create(:ci_runner, :project, maximum_timeout: 2000, projects: [project]) }
-
- it 'contains info about timeout not overridden by runner' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['runner_info']).to include({ 'timeout' => 1234 })
- end
- end
- end
- end
- end
-
- describe 'port support' do
- let(:job) { create(:ci_build, pipeline: pipeline, options: options) }
-
- context 'when job image has ports' do
- let(:options) do
- {
- image: {
- name: 'ruby',
- ports: [80]
- },
- services: ['mysql']
- }
- end
-
- it 'returns the image ports' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to include(
- 'id' => job.id,
- 'image' => a_hash_including('name' => 'ruby', 'ports' => [{ 'number' => 80, 'protocol' => 'http', 'name' => 'default_port' }]),
- 'services' => all(a_hash_including('name' => 'mysql')))
- end
- end
-
- context 'when job services settings has ports' do
- let(:options) do
- {
- image: 'ruby',
- services: [
- {
- name: 'tomcat',
- ports: [{ number: 8081, protocol: 'http', name: 'custom_port' }]
- }
- ]
- }
- end
-
- it 'returns the service ports' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to include(
- 'id' => job.id,
- 'image' => a_hash_including('name' => 'ruby'),
- 'services' => all(a_hash_including('name' => 'tomcat', 'ports' => [{ 'number' => 8081, 'protocol' => 'http', 'name' => 'custom_port' }])))
- end
- end
- end
-
- describe 'a job with excluded artifacts' do
- context 'when excluded paths are defined' do
- let(:job) do
- create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'test',
- stage: 'deploy', stage_idx: 1,
- options: { artifacts: { paths: ['abc'], exclude: ['cde'] } })
- end
-
- context 'when a runner supports this feature' do
- it 'exposes excluded paths when the feature is enabled' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
- request_job info: { features: { artifacts_exclude: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).to include('exclude' => ['cde'])
- end
-
- it 'does not expose excluded paths when the feature is disabled' do
- stub_feature_flags(ci_artifacts_exclude: false)
-
- request_job info: { features: { artifacts_exclude: true } }
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).not_to have_key('exclude')
- end
- end
-
- context 'when a runner does not support this feature' do
- it 'does not expose the build at all' do
- stub_feature_flags(ci_artifacts_exclude: true)
-
- request_job
-
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
- end
-
- it 'does not expose excluded paths when these are empty' do
- request_job
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response.dig('artifacts').first).not_to have_key('exclude')
- end
- end
-
- def request_job(token = runner.token, **params)
- new_params = params.merge(token: token, last_update: last_update)
- post api('/jobs/request'), params: new_params.to_json, headers: { 'User-Agent' => user_agent, 'Content-Type': 'application/json' }
- end
- end
-
- context 'for web-ide job' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
-
- let(:runner) { create(:ci_runner, :project, projects: [project]) }
- let(:service) { ::Ci::CreateWebIdeTerminalService.new(project, user, ref: 'master').execute }
- let(:pipeline) { service[:pipeline] }
- let(:build) { pipeline.builds.first }
- let(:job) { {} }
- let(:config_content) do
- 'terminal: { image: ruby, services: [mysql], before_script: [ls], tags: [tag-1], variables: { KEY: value } }'
- end
-
- before do
- stub_webide_config_file(config_content)
- project.add_maintainer(user)
-
- pipeline
- end
-
- context 'when runner has matching tag' do
- before do
- runner.update!(tag_list: ['tag-1'])
- end
-
- it 'successfully picks job' do
- request_job
-
- build.reload
-
- expect(build).to be_running
- expect(build.runner).to eq(runner)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response).to include(
- "id" => build.id,
- "variables" => include("key" => 'KEY', "value" => 'value', "public" => true, "masked" => false),
- "image" => a_hash_including("name" => 'ruby'),
- "services" => all(a_hash_including("name" => 'mysql')),
- "job_info" => a_hash_including("name" => 'terminal', "stage" => 'terminal'))
- end
- end
-
- context 'when runner does not have matching tags' do
- it 'does not pick a job' do
- request_job
-
- build.reload
-
- expect(build).to be_pending
- expect(response).to have_gitlab_http_status(:no_content)
- end
- end
-
- def request_job(token = runner.token, **params)
- post api('/jobs/request'), params: params.merge(token: token)
- end
- end
- end
-
- describe 'PUT /api/v4/jobs/:id' do
- let(:job) do
- create(:ci_build, :pending, :trace_live, pipeline: pipeline, project: project, user: user, runner_id: runner.id)
- end
-
- before do
- job.run!
- end
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id' do
- let(:send_request) { update_job(state: 'success') }
- end
-
- it 'updates runner info' do
- expect { update_job(state: 'success') }.to change { runner.reload.contacted_at }
- end
-
- context 'when status is given' do
- it 'mark job as succeeded' do
- update_job(state: 'success')
-
- job.reload
- expect(job).to be_success
- end
-
- it 'mark job as failed' do
- update_job(state: 'failed')
-
- job.reload
- expect(job).to be_failed
- expect(job).to be_unknown_failure
- end
-
- context 'when failure_reason is script_failure' do
- before do
- update_job(state: 'failed', failure_reason: 'script_failure')
- job.reload
- end
-
- it { expect(job).to be_script_failure }
- end
-
- context 'when failure_reason is runner_system_failure' do
- before do
- update_job(state: 'failed', failure_reason: 'runner_system_failure')
- job.reload
- end
-
- it { expect(job).to be_runner_system_failure }
- end
-
- context 'when failure_reason is unrecognized value' do
- before do
- update_job(state: 'failed', failure_reason: 'what_is_this')
- job.reload
- end
-
- it { expect(job).to be_unknown_failure }
- end
-
- context 'when failure_reason is job_execution_timeout' do
- before do
- update_job(state: 'failed', failure_reason: 'job_execution_timeout')
- job.reload
- end
-
- it { expect(job).to be_job_execution_timeout }
- end
-
- context 'when failure_reason is unmet_prerequisites' do
- before do
- update_job(state: 'failed', failure_reason: 'unmet_prerequisites')
- job.reload
- end
-
- it { expect(job).to be_unmet_prerequisites }
- end
- end
-
- context 'when trace is given' do
- it 'creates a trace artifact' do
- allow(BuildFinishedWorker).to receive(:perform_async).with(job.id) do
- ArchiveTraceWorker.new.perform(job.id)
- end
-
- update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
-
- job.reload
- expect(response).to have_gitlab_http_status(:ok)
- expect(job.trace.raw).to eq 'BUILD TRACE UPDATED'
- expect(job.job_artifacts_trace.open.read).to eq 'BUILD TRACE UPDATED'
- end
-
- context 'when concurrent update of trace is happening' do
- before do
- job.trace.write('wb') do
- update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
- end
- end
-
- it 'returns that operation conflicts' do
- expect(response).to have_gitlab_http_status(:conflict)
- end
- end
- end
-
- context 'when no trace is given' do
- it 'does not override trace information' do
- update_job
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE'
- end
-
- context 'when running state is sent' do
- it 'updates update_at value' do
- expect { update_job_after_time }.to change { job.reload.updated_at }
- end
- end
-
- context 'when other state is sent' do
- it "doesn't update update_at value" do
- expect { update_job_after_time(20.minutes, state: 'success') }.not_to change { job.reload.updated_at }
- end
- end
- end
-
- context 'when job has been erased' do
- let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
-
- it 'responds with forbidden' do
- update_job
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when job has already been finished' do
- before do
- job.trace.set('Job failed')
- job.drop!(:script_failure)
- end
-
- it 'does not update job status and job trace' do
- update_job(state: 'success', trace: 'BUILD TRACE UPDATED')
-
- job.reload
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(response.header['Job-Status']).to eq 'failed'
- expect(job.trace.raw).to eq 'Job failed'
- expect(job).to be_failed
- end
- end
-
- def update_job(token = job.token, **params)
- new_params = params.merge(token: token)
- put api("/jobs/#{job.id}"), params: new_params
- end
-
- def update_job_after_time(update_interval = 20.minutes, state = 'running')
- Timecop.travel(job.updated_at + update_interval) do
- update_job(job.token, state: state)
- end
- end
- end
-
- describe 'PATCH /api/v4/jobs/:id/trace' do
- let(:job) do
- create(:ci_build, :running, :trace_live,
- project: project, user: user, runner_id: runner.id, pipeline: pipeline)
- end
- let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
- let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
- let(:update_interval) { 10.seconds.to_i }
-
- before do
- initial_patch_the_trace
- end
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/trace' do
- let(:send_request) { patch_the_trace }
- end
-
- it 'updates runner info' do
- runner.update!(contacted_at: 1.year.ago)
-
- expect { patch_the_trace }.to change { runner.reload.contacted_at }
- end
-
- context 'when request is valid' do
- it 'gets correct response' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- expect(response.header).to have_key 'Range'
- expect(response.header).to have_key 'Job-Status'
- expect(response.header).to have_key 'X-GitLab-Trace-Update-Interval'
- end
-
- context 'when job has been updated recently' do
- it { expect { patch_the_trace }.not_to change { job.updated_at }}
-
- it "changes the job's trace" do
- patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
- end
-
- context 'when Runner makes a force-patch' do
- it { expect { force_patch_the_trace }.not_to change { job.updated_at }}
-
- it "doesn't change the build.trace" do
- force_patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- end
- end
- end
-
- context 'when job was not updated recently' do
- let(:update_interval) { 15.minutes.to_i }
-
- it { expect { patch_the_trace }.to change { job.updated_at } }
-
- it 'changes the job.trace' do
- patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
- end
-
- context 'when Runner makes a force-patch' do
- it { expect { force_patch_the_trace }.to change { job.updated_at } }
-
- it "doesn't change the job.trace" do
- force_patch_the_trace
-
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- end
- end
- end
-
- context 'when project for the build has been deleted' do
- let(:job) do
- create(:ci_build, :running, :trace_live, runner_id: runner.id, pipeline: pipeline) do |job|
- job.project.update(pending_delete: true)
- end
- end
-
- it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when trace is patched' do
- before do
- patch_the_trace
- end
-
- it 'has valid trace' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
- end
-
- context 'when job is cancelled' do
- before do
- job.cancel
- end
-
- context 'when trace is patched' do
- before do
- patch_the_trace
- end
-
- it 'returns Forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- context 'when redis data are flushed' do
- before do
- redis_shared_state_cleanup!
- end
-
- it 'has empty trace' do
- expect(job.reload.trace.raw).to eq ''
- end
-
- context 'when we perform partial patch' do
- before do
- patch_the_trace('hello', headers.merge({ 'Content-Range' => "28-32/5" }))
- end
-
- it 'returns an error' do
- expect(response).to have_gitlab_http_status(:range_not_satisfiable)
- expect(response.header['Range']).to eq('0-0')
- end
- end
-
- context 'when we resend full trace' do
- before do
- patch_the_trace('BUILD TRACE appended appended hello', headers.merge({ 'Content-Range' => "0-34/35" }))
- end
-
- it 'succeeds with updating trace' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended hello'
- end
- end
- end
- end
-
- context 'when concurrent update of trace is happening' do
- before do
- job.trace.write('wb') do
- patch_the_trace
- end
- end
-
- it 'returns that operation conflicts' do
- expect(response).to have_gitlab_http_status(:conflict)
- end
- end
-
- context 'when the job is canceled' do
- before do
- job.cancel
- patch_the_trace
- end
-
- it 'receives status in header' do
- expect(response.header['Job-Status']).to eq 'canceled'
- end
- end
-
- context 'when build trace is being watched' do
- before do
- job.trace.being_watched!
- end
-
- it 'returns X-GitLab-Trace-Update-Interval as 3' do
- patch_the_trace
-
- expect(response).to have_gitlab_http_status(:accepted)
- expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('3')
- end
- end
-
- context 'when build trace is not being watched' do
- it 'returns X-GitLab-Trace-Update-Interval as 30' do
- patch_the_trace
-
- expect(response).to have_gitlab_http_status(:accepted)
- expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
- end
- end
- end
-
- context 'when Runner makes a force-patch' do
- before do
- force_patch_the_trace
- end
-
- it 'gets correct response' do
- expect(response).to have_gitlab_http_status(:accepted)
- expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
- expect(response.header).to have_key 'Range'
- expect(response.header).to have_key 'Job-Status'
- end
- end
-
- context 'when content-range start is too big' do
- let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20/6' }) }
-
- it 'gets 416 error response with range headers' do
- expect(response).to have_gitlab_http_status(:range_not_satisfiable)
- expect(response.header).to have_key 'Range'
- expect(response.header['Range']).to eq '0-11'
- end
- end
-
- context 'when content-range start is too small' do
- let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20/13' }) }
-
- it 'gets 416 error response with range headers' do
- expect(response).to have_gitlab_http_status(:range_not_satisfiable)
- expect(response.header).to have_key 'Range'
- expect(response.header['Range']).to eq '0-11'
- end
- end
-
- context 'when Content-Range header is missing' do
- let(:headers_with_range) { headers }
-
- it { expect(response).to have_gitlab_http_status(:bad_request) }
- end
-
- context 'when job has been erased' do
- let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
-
- it { expect(response).to have_gitlab_http_status(:forbidden) }
- end
-
- def patch_the_trace(content = ' appended', request_headers = nil)
- unless request_headers
- job.trace.read do |stream|
- offset = stream.size
- limit = offset + content.length - 1
- request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
- end
- end
-
- Timecop.travel(job.updated_at + update_interval) do
- patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
- job.reload
- end
- end
-
- def initial_patch_the_trace
- patch_the_trace(' appended', headers_with_range)
- end
-
- def force_patch_the_trace
- 2.times { patch_the_trace('') }
- end
- end
-
- describe 'artifacts' do
- let(:job) { create(:ci_build, :pending, user: user, project: project, pipeline: pipeline, runner_id: runner.id) }
- let(:jwt) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
- let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt } }
- let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
- let(:file_upload) { fixture_file_upload('spec/fixtures/banana_sample.gif', 'image/gif') }
- let(:file_upload2) { fixture_file_upload('spec/fixtures/dk.png', 'image/gif') }
-
- before do
- stub_artifacts_object_storage
- job.run!
- end
-
- shared_examples_for 'rejecting artifacts that are too large' do
- let(:filesize) { 100.megabytes.to_i }
- let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
-
- shared_examples_for 'failed request' do
- it 'responds with payload too large error' do
- send_request
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
- end
-
- context 'based on plan limit setting' do
- let(:application_max_size) { sample_max_size + 100 }
- let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
-
- before do
- create(:plan_limits, :default_plan, limit_name => sample_max_size)
- stub_application_setting(max_artifacts_size: application_max_size)
- end
-
- context 'and feature flag ci_max_artifact_size_per_type is enabled' do
- before do
- stub_feature_flags(ci_max_artifact_size_per_type: true)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'and feature flag ci_max_artifact_size_per_type is disabled' do
- before do
- stub_feature_flags(ci_max_artifact_size_per_type: false)
- end
-
- it 'bases the limit on the closest project setting' do
- send_request
-
- expect(response).to have_gitlab_http_status(success_code)
- end
- end
- end
-
- context 'based on application setting' do
- before do
- stub_application_setting(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'based on root namespace setting' do
- let(:application_max_size) { sample_max_size + 10 }
-
- before do
- stub_application_setting(max_artifacts_size: application_max_size)
- root_namespace.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'based on child namespace setting' do
- let(:application_max_size) { sample_max_size + 10 }
- let(:root_namespace_max_size) { sample_max_size + 10 }
-
- before do
- stub_application_setting(max_artifacts_size: application_max_size)
- root_namespace.update!(max_artifacts_size: root_namespace_max_size)
- namespace.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
-
- context 'based on project setting' do
- let(:application_max_size) { sample_max_size + 10 }
- let(:root_namespace_max_size) { sample_max_size + 10 }
- let(:child_namespace_max_size) { sample_max_size + 10 }
-
- before do
- stub_application_setting(max_artifacts_size: application_max_size)
- root_namespace.update!(max_artifacts_size: root_namespace_max_size)
- namespace.update!(max_artifacts_size: child_namespace_max_size)
- project.update!(max_artifacts_size: sample_max_size)
- end
-
- it_behaves_like 'failed request'
- end
- end
-
- describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
- context 'when using token as parameter' do
- context 'and the artifact is too large' do
- it_behaves_like 'rejecting artifacts that are too large' do
- let(:success_code) { :ok }
- let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
- end
- end
-
- context 'posting artifacts to running job' do
- subject do
- authorize_artifacts_with_token_in_params
- end
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
- let(:send_request) { subject }
- end
-
- it 'updates runner info' do
- expect { subject }.to change { runner.reload.contacted_at }
- end
-
- shared_examples 'authorizes local file' do
- it 'succeeds' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response['TempPath']).to eq(JobArtifactUploader.workhorse_local_upload_path)
- expect(json_response['RemoteObject']).to be_nil
- end
- end
-
- context 'when using local storage' do
- it_behaves_like 'authorizes local file'
- end
-
- context 'when using remote storage' do
- context 'when direct upload is enabled' do
- before do
- stub_artifacts_object_storage(enabled: true, direct_upload: true)
- end
-
- it 'succeeds' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response).not_to have_key('TempPath')
- expect(json_response['RemoteObject']).to have_key('ID')
- expect(json_response['RemoteObject']).to have_key('GetURL')
- expect(json_response['RemoteObject']).to have_key('StoreURL')
- expect(json_response['RemoteObject']).to have_key('DeleteURL')
- expect(json_response['RemoteObject']).to have_key('MultipartUpload')
- end
- end
-
- context 'when direct upload is disabled' do
- before do
- stub_artifacts_object_storage(enabled: true, direct_upload: false)
- end
-
- it_behaves_like 'authorizes local file'
- end
- end
- end
- end
-
- context 'when using token as header' do
- it 'authorizes posting artifacts to running job' do
- authorize_artifacts_with_token_in_headers
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
- expect(json_response['TempPath']).not_to be_nil
- end
-
- it 'fails to post too large artifact' do
- stub_application_setting(max_artifacts_size: 0)
-
- authorize_artifacts_with_token_in_headers(filesize: 100)
-
- expect(response).to have_gitlab_http_status(:payload_too_large)
- end
- end
-
- context 'when using runners token' do
- it 'fails to authorize artifacts posting' do
- authorize_artifacts(token: job.project.runners_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- it 'rejects requests that did not go through gitlab-workhorse' do
- headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
-
- authorize_artifacts
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- context 'authorization token is invalid' do
- it 'responds with forbidden' do
- authorize_artifacts(token: 'invalid', filesize: 100)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'authorize uploading of an lsif artifact' do
- before do
- stub_feature_flags(code_navigation: job.project)
- end
-
- it 'adds ProcessLsif header' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['ProcessLsif']).to be_truthy
- end
-
- it 'adds ProcessLsifReferences header' do
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['ProcessLsifReferences']).to be_truthy
- end
-
- context 'code_navigation feature flag is disabled' do
- it 'responds with a forbidden error' do
- stub_feature_flags(code_navigation: false)
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:forbidden)
- expect(json_response['ProcessLsif']).to be_falsy
- expect(json_response['ProcessLsifReferences']).to be_falsy
- end
- end
- end
-
- context 'code_navigation_references feature flag is disabled' do
- it 'sets ProcessLsifReferences header to false' do
- stub_feature_flags(code_navigation_references: false)
- authorize_artifacts_with_token_in_headers(artifact_type: :lsif)
-
- aggregate_failures do
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['ProcessLsif']).to be_truthy
- expect(json_response['ProcessLsifReferences']).to be_falsy
- end
- end
- end
- end
-
- def authorize_artifacts(params = {}, request_headers = headers)
- post api("/jobs/#{job.id}/artifacts/authorize"), params: params, headers: request_headers
- end
-
- def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
- params = params.merge(token: job.token)
- authorize_artifacts(params, request_headers)
- end
-
- def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
- authorize_artifacts(params, request_headers)
- end
- end
-
- describe 'POST /api/v4/jobs/:id/artifacts' do
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
- let(:send_request) do
- upload_artifacts(file_upload, headers_with_token)
- end
- end
-
- it 'updates runner info' do
- expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
- end
-
- context 'when the artifact is too large' do
- it_behaves_like 'rejecting artifacts that are too large' do
- # This filesize validation also happens for non-remote stored files;
- # it's just hard to stub the filesize to be more than a megabyte in
- # the other cases.
- let!(:fog_connection) do
- stub_artifacts_object_storage(direct_upload: true)
- end
- let(:object) do
- fog_connection.directories.new(key: 'artifacts').files.create(
- key: 'tmp/uploads/12312300',
- body: 'content'
- )
- end
- let(:file_upload) { fog_to_uploaded_file(object) }
- let(:send_request) do
- upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
- end
- let(:success_code) { :created }
-
- before do
- allow(object).to receive(:content_length).and_return(filesize)
- end
- end
- end
-
- context 'when artifacts are being stored inside of tmp path' do
- before do
- # by configuring this path we allow passing a temp file from any path
- allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
- end
-
- context 'when job has been erased' do
- let(:job) { create(:ci_build, erased_at: Time.now) }
-
- before do
- upload_artifacts(file_upload, headers_with_token)
- end
-
- it 'responds with forbidden' do
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when job is running' do
- shared_examples 'successful artifacts upload' do
- it 'updates successfully' do
- expect(response).to have_gitlab_http_status(:created)
- end
- end
-
- context 'when using accelerated file post' do
- context 'for file stored locally' do
- before do
- upload_artifacts(file_upload, headers_with_token)
- end
-
- it_behaves_like 'successful artifacts upload'
- end
-
- context 'for file stored remotely' do
- let!(:fog_connection) do
- stub_artifacts_object_storage(direct_upload: true)
- end
- let(:object) do
- fog_connection.directories.new(key: 'artifacts').files.create(
- key: 'tmp/uploads/12312300',
- body: 'content'
- )
- end
- let(:file_upload) { fog_to_uploaded_file(object) }
-
- before do
- upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => remote_id)
- end
-
- context 'when valid remote_id is used' do
- let(:remote_id) { '12312300' }
-
- it_behaves_like 'successful artifacts upload'
- end
-
- context 'when invalid remote_id is used' do
- let(:remote_id) { 'invalid id' }
-
- it 'responds with internal server error' do
- expect(response).to have_gitlab_http_status(:internal_server_error)
- expect(json_response['message']).to eq("Missing file")
- end
- end
- end
- end
-
- context 'when using runners token' do
- it 'responds with forbidden' do
- upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- context 'when artifacts post request does not contain file' do
- it 'fails to post artifacts without file' do
- post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'GitLab Workhorse is not configured' do
- it 'fails to post artifacts without GitLab-Workhorse' do
- post api("/jobs/#{job.id}/artifacts"), params: { token: job.token }, headers: {}
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- context 'when GitLab Workhorse token headers are missing' do
- let(:jwt) { JWT.encode({ 'iss' => 'invalid-header' }, Gitlab::Workhorse.secret, 'HS256') }
-
- it 'fails to post artifacts with an invalid GitLab-Workhorse token' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception).once
-
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
-
- context 'when setting an expire date' do
- let(:default_artifacts_expire_in) {}
- let(:post_data) do
- { file: file_upload,
- expire_in: expire_in }
- end
-
- before do
- stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)
-
- upload_artifacts(file_upload, headers_with_token, post_data)
- end
-
- context 'when an expire_in is given' do
- let(:expire_in) { '7 days' }
-
- it 'updates when specified' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
- end
- end
-
- context 'when no expire_in is given' do
- let(:expire_in) { nil }
-
- it 'ignores if not specified' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_nil
- end
-
- context 'with application default' do
- context 'when default is 5 days' do
- let(:default_artifacts_expire_in) { '5 days' }
-
- it 'sets to application default' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
- end
- end
-
- context 'when default is 0' do
- let(:default_artifacts_expire_in) { '0' }
-
- it 'does not set expire_in' do
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.artifacts_expire_at).to be_nil
- end
- end
- end
- end
- end
-
- context 'posts artifacts file and metadata file' do
- let!(:artifacts) { file_upload }
- let!(:artifacts_sha256) { Digest::SHA256.file(artifacts.path).hexdigest }
- let!(:metadata) { file_upload2 }
- let!(:metadata_sha256) { Digest::SHA256.file(metadata.path).hexdigest }
-
- let(:stored_artifacts_file) { job.reload.artifacts_file }
- let(:stored_metadata_file) { job.reload.artifacts_metadata }
- let(:stored_artifacts_size) { job.reload.artifacts_size }
- let(:stored_artifacts_sha256) { job.reload.job_artifacts_archive.file_sha256 }
- let(:stored_metadata_sha256) { job.reload.job_artifacts_metadata.file_sha256 }
- let(:file_keys) { post_data.keys }
- let(:send_rewritten_field) { true }
-
- before do
- workhorse_finalize_with_multiple_files(
- api("/jobs/#{job.id}/artifacts"),
- method: :post,
- file_keys: file_keys,
- params: post_data,
- headers: headers_with_token,
- send_rewritten_field: send_rewritten_field
- )
- end
-
- context 'when posted data accelerated by workhorse is correct' do
- let(:post_data) { { file: artifacts, metadata: metadata } }
-
- it 'stores artifacts and artifacts metadata' do
- expect(response).to have_gitlab_http_status(:created)
- expect(stored_artifacts_file.filename).to eq(artifacts.original_filename)
- expect(stored_metadata_file.filename).to eq(metadata.original_filename)
- expect(stored_artifacts_size).to eq(artifacts.size)
- expect(stored_artifacts_sha256).to eq(artifacts_sha256)
- expect(stored_metadata_sha256).to eq(metadata_sha256)
- end
- end
-
- context 'with a malicious file.path param' do
- let(:post_data) { {} }
- let(:tmp_file) { Tempfile.new('crafted.file.path') }
- let(:url) { "/jobs/#{job.id}/artifacts?file.path=#{tmp_file.path}" }
-
- it 'rejects the request' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(stored_artifacts_size).to be_nil
- end
- end
-
- context 'when workhorse header is missing' do
- let(:post_data) { { file: artifacts, metadata: metadata } }
- let(:send_rewritten_field) { false }
-
- it 'rejects the request' do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(stored_artifacts_size).to be_nil
- end
- end
-
- context 'when there is no artifacts file in post data' do
- let(:post_data) do
- { metadata: metadata }
- end
-
- it 'is expected to respond with bad request' do
- expect(response).to have_gitlab_http_status(:bad_request)
- end
-
- it 'does not store metadata' do
- expect(stored_metadata_file).to be_nil
- end
- end
- end
-
- context 'when artifact_type is archive' do
- context 'when artifact_format is zip' do
- let(:params) { { artifact_type: :archive, artifact_format: :zip } }
-
- it 'stores the archive artifact' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_archive).not_to be_nil
- end
- end
-
- context 'when artifact_format is gzip' do
- let(:params) { { artifact_type: :archive, artifact_format: :gzip } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_archive).to be_nil
- end
- end
- end
-
- context 'when artifact_type is junit' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
- let(:params) { { artifact_type: :junit, artifact_format: :gzip } }
-
- it 'stores junit test report' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_junit).not_to be_nil
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/junit/junit.xml.gz') }
- let(:params) { { artifact_type: :junit, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_junit).to be_nil
- end
- end
- end
-
- context 'when artifact_type is metrics_referee' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
- let(:params) { { artifact_type: :metrics_referee, artifact_format: :gzip } }
-
- it 'stores metrics_referee data' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_metrics_referee).not_to be_nil
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/metrics_referee.json.gz') }
- let(:params) { { artifact_type: :metrics_referee, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_metrics_referee).to be_nil
- end
- end
- end
-
- context 'when artifact_type is network_referee' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
- let(:params) { { artifact_type: :network_referee, artifact_format: :gzip } }
-
- it 'stores network_referee data' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_network_referee).not_to be_nil
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/referees/network_referee.json.gz') }
- let(:params) { { artifact_type: :network_referee, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_network_referee).to be_nil
- end
- end
- end
-
- context 'when artifact_type is dotenv' do
- context 'when artifact_format is gzip' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
- let(:params) { { artifact_type: :dotenv, artifact_format: :gzip } }
-
- it 'stores dotenv file' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_dotenv).not_to be_nil
- end
-
- it 'parses dotenv file' do
- expect do
- upload_artifacts(file_upload, headers_with_token, params)
- end.to change { job.job_variables.count }.from(0).to(2)
- end
-
- context 'when parse error happens' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/ci_build_artifacts_metadata.gz') }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']).to eq('Invalid Format')
- end
- end
- end
-
- context 'when artifact_format is raw' do
- let(:file_upload) { fixture_file_upload('spec/fixtures/build.env.gz') }
- let(:params) { { artifact_type: :dotenv, artifact_format: :raw } }
-
- it 'returns an error' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_dotenv).to be_nil
- end
- end
- end
- end
-
- context 'when artifacts already exist for the job' do
- let(:params) do
- {
- artifact_type: :archive,
- artifact_format: :zip,
- 'file.sha256' => uploaded_sha256
- }
- end
-
- let(:existing_sha256) { '0' * 64 }
-
- let!(:existing_artifact) do
- create(:ci_job_artifact, :archive, file_sha256: existing_sha256, job: job)
- end
-
- context 'when sha256 is the same as the existing artifact' do
- let(:uploaded_sha256) { existing_sha256 }
-
- it 'ignores the new artifact' do
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:created)
- expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
- end
- end
-
- context 'when sha256 is different than the existing artifact' do
- let(:uploaded_sha256) { '1' * 64 }
-
- it 'logs and returns an error' do
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
-
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(job.reload.job_artifacts_archive).to eq(existing_artifact)
- end
- end
- end
-
- context 'when object storage throws errors' do
- let(:params) { { artifact_type: :archive, artifact_format: :zip } }
-
- it 'does not store artifacts' do
- allow_next_instance_of(JobArtifactUploader) do |uploader|
- allow(uploader).to receive(:store!).and_raise(Errno::EIO)
- end
-
- upload_artifacts(file_upload, headers_with_token, params)
-
- expect(response).to have_gitlab_http_status(:service_unavailable)
- expect(job.reload.job_artifacts_archive).to be_nil
- end
- end
-
- context 'when artifacts are being stored outside of tmp path' do
- let(:new_tmpdir) { Dir.mktmpdir }
-
- before do
- # init before overwriting tmp dir
- file_upload
-
- # by configuring this path we only allow passing a file from @tmpdir,
- # but all temporary files are stored in the system tmp directory
- allow(Dir).to receive(:tmpdir).and_return(new_tmpdir)
- end
-
- after do
- FileUtils.remove_entry(new_tmpdir)
- end
-
- it 'fails to post artifacts stored outside of the tmp path' do
- upload_artifacts(file_upload, headers_with_token)
-
- expect(response).to have_gitlab_http_status(:bad_request)
- end
- end
-
- def upload_artifacts(file, headers = {}, params = {})
- workhorse_finalize(
- api("/jobs/#{job.id}/artifacts"),
- method: :post,
- file_key: :file,
- params: params.merge(file: file),
- headers: headers,
- send_rewritten_field: true
- )
- end
- end
-
- describe 'GET /api/v4/jobs/:id/artifacts' do
- let(:token) { job.token }
-
- it_behaves_like 'application context metadata', '/api/:version/jobs/:id/artifacts' do
- let(:send_request) { download_artifact }
- end
-
- it 'updates runner info' do
- expect { download_artifact }.to change { runner.reload.contacted_at }
- end
-
- context 'when job has artifacts' do
- let(:job) { create(:ci_build) }
- let(:store) { JobArtifactUploader::Store::LOCAL }
-
- before do
- create(:ci_job_artifact, :archive, file_store: store, job: job)
- end
-
- context 'when using job token' do
- context 'when artifacts are stored locally' do
- let(:download_headers) do
- { 'Content-Transfer-Encoding' => 'binary',
- 'Content-Disposition' => %q(attachment; filename="ci_build_artifacts.zip"; filename*=UTF-8''ci_build_artifacts.zip) }
- end
-
- before do
- download_artifact
- end
-
- it 'downloads artifacts' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h).to include download_headers
- end
- end
-
- context 'when artifacts are stored remotely' do
- let(:store) { JobArtifactUploader::Store::REMOTE }
- let!(:job) { create(:ci_build) }
-
- context 'when proxy download is being used' do
- before do
- download_artifact(direct_download: false)
- end
-
- it 'uses workhorse send-url' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.headers.to_h).to include(
- 'Gitlab-Workhorse-Send-Data' => /send-url:/)
- end
- end
-
- context 'when direct download is being used' do
- before do
- download_artifact(direct_download: true)
- end
-
- it 'receives a redirect for downloading artifacts' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response.headers).to include('Location')
- end
- end
- end
- end
-
- context 'when using runners token' do
- let(:token) { job.project.runners_token }
-
- before do
- download_artifact
- end
-
- it 'responds with forbidden' do
- expect(response).to have_gitlab_http_status(:forbidden)
- end
- end
- end
-
- context 'when job does not have artifacts' do
- it 'responds with not found' do
- download_artifact
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- def download_artifact(params = {}, request_headers = headers)
- params = params.merge(token: token)
- job.reload
-
- get api("/jobs/#{job.id}/artifacts"), params: params, headers: request_headers
- end
- end
- end
- end
-end
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 724e3177173..21ff0a94db9 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -329,6 +329,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_c_params) do
{
branch: 'master',
@@ -342,6 +343,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_utf8_c_params) do
{
branch: 'master',
@@ -621,6 +623,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_d_params) do
{
branch: 'markdown',
@@ -664,6 +667,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_m_params) do
{
branch: 'feature',
@@ -708,6 +712,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_u_params) do
{
branch: 'master',
@@ -819,6 +824,7 @@ RSpec.describe API::Commits do
]
}
end
+
let(:valid_mo_params) do
{
branch: 'master',
@@ -1462,6 +1468,16 @@ RSpec.describe API::Commits do
expect(json_response['author_name']).to eq(commit.author_name)
expect(json_response['committer_name']).to eq(user.name)
end
+
+ it 'supports dry-run without applying changes' do
+ head = project.commit(branch)
+
+ post api(route, current_user), params: { branch: branch, dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq("dry_run" => "success")
+ expect(project.commit(branch)).to eq(head)
+ end
end
context 'when repository is disabled' do
@@ -1533,6 +1549,14 @@ RSpec.describe API::Commits do
expect(json_response['error_code']).to eq 'empty'
end
+
+ it 'includes an additional dry_run error field when enabled' do
+ post api(route, current_user), params: { branch: 'markdown', dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error_code']).to eq 'empty'
+ expect(json_response['dry_run']).to eq 'error'
+ end
end
context 'when ref contains a dot' do
@@ -1623,6 +1647,16 @@ RSpec.describe API::Commits do
expect(json_response['committer_name']).to eq(user.name)
expect(json_response['parent_ids']).to contain_exactly(commit_id)
end
+
+ it 'supports dry-run without applying changes' do
+ head = project.commit(branch)
+
+ post api(route, current_user), params: { branch: branch, dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to eq("dry_run" => "success")
+ expect(project.commit(branch)).to eq(head)
+ end
end
context 'when repository is disabled' do
@@ -1704,6 +1738,18 @@ RSpec.describe API::Commits do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['error_code']).to eq 'empty'
end
+
+ it 'includes an additional dry_run error field when enabled' do
+ # First one actually reverts
+ post api(route, current_user), params: { branch: 'markdown' }
+
+ # Second one is redundant and should be empty
+ post api(route, current_user), params: { branch: 'markdown', dry_run: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error_code']).to eq 'empty'
+ expect(json_response['dry_run']).to eq 'error'
+ end
end
end
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index d756a7700f6..f5b8ebb545b 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe API::ComposerPackages do
- include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group, reload: true) { create(:group, :public) }
@@ -11,47 +11,88 @@ RSpec.describe API::ComposerPackages do
let_it_be(:project, reload: true) { create(:project, :custom_repo, files: { 'composer.json' => { name: package_name }.to_json }, group: group) }
let(:headers) { {} }
+ using RSpec::Parameterized::TableSyntax
+
describe 'GET /api/v4/group/:id/-/packages/composer/packages' do
let(:url) { "/group/#{group.id}/-/packages/composer/packages.json" }
subject { get api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- let!(:package) { create(:composer_package, :with_metadatum, project: project) }
+ context 'with valid project' do
+ let!(:package) { create(:composer_package, :with_metadatum, project: project) }
- using RSpec::Parameterized::TableSyntax
+ context 'with a public group' do
+ before do
+ group.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package index' | :success
- 'PUBLIC' | :guest | true | true | 'Composer package index' | :success
- 'PUBLIC' | :developer | true | false | 'Composer package index' | :success
- 'PUBLIC' | :guest | true | false | 'Composer package index' | :success
- 'PUBLIC' | :developer | false | true | 'Composer package index' | :success
- 'PUBLIC' | :guest | false | true | 'Composer package index' | :success
- 'PUBLIC' | :developer | false | false | 'Composer package index' | :success
- 'PUBLIC' | :guest | false | false | 'Composer package index' | :success
- 'PUBLIC' | :anonymous | false | true | 'Composer package index' | :success
- 'PRIVATE' | :developer | true | true | 'Composer package index' | :success
- 'PRIVATE' | :guest | true | true | 'Composer package index' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
+ 'PUBLIC' | :developer | true | true | :include_package
+ 'PUBLIC' | :developer | true | false | :include_package
+ 'PUBLIC' | :developer | false | false | :include_package
+ 'PUBLIC' | :developer | false | true | :include_package
+ 'PUBLIC' | :guest | true | true | :include_package
+ 'PUBLIC' | :guest | true | false | :include_package
+ 'PUBLIC' | :guest | false | true | :include_package
+ 'PUBLIC' | :guest | false | false | :include_package
+ 'PUBLIC' | :anonymous | false | true | :include_package
+ 'PRIVATE' | :developer | true | true | :include_package
+ 'PRIVATE' | :developer | true | false | :does_not_include_package
+ 'PRIVATE' | :developer | false | true | :does_not_include_package
+ 'PRIVATE' | :developer | false | false | :does_not_include_package
+ 'PRIVATE' | :guest | true | true | :does_not_include_package
+ 'PRIVATE' | :guest | true | false | :does_not_include_package
+ 'PRIVATE' | :guest | false | true | :does_not_include_package
+ 'PRIVATE' | :guest | false | false | :does_not_include_package
+ 'PRIVATE' | :anonymous | false | true | :does_not_include_package
end
with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like 'Composer package index', params[:user_role], :success, params[:member], params[:include_package]
end
end
end
- it_behaves_like 'rejects Composer access with unknown group id'
+ context 'with a private group' do
+ before do
+ group.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ context 'with access to the api' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :include_package) do
+ 'PRIVATE' | :developer | true | true | :include_package
+ 'PRIVATE' | :guest | true | true | :does_not_include_package
+ end
+
+ with_them do
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like 'Composer package index', params[:user_role], :success, params[:member], params[:include_package]
+ end
+ end
+ end
+
+ context 'without access to the api' do
+ where(:project_visibility_level, :user_role, :member, :user_token) do
+ 'PRIVATE' | :developer | true | false
+ 'PRIVATE' | :developer | false | true
+ 'PRIVATE' | :developer | false | false
+ 'PRIVATE' | :guest | true | false
+ 'PRIVATE' | :guest | false | true
+ 'PRIVATE' | :guest | false | false
+ 'PRIVATE' | :anonymous | false | true
+ end
+
+ with_them do
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like 'process Composer api request', params[:user_role], :not_found, params[:member]
+ end
+ end
+ end
+ end
end
+
+ it_behaves_like 'rejects Composer access with unknown group id'
end
describe 'GET /api/v4/group/:id/-/packages/composer/p/:sha.json' do
@@ -61,40 +102,36 @@ RSpec.describe API::ComposerPackages do
subject { get api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer provider index' | :success
- 'PUBLIC' | :guest | true | true | 'Composer provider index' | :success
- 'PUBLIC' | :developer | true | false | 'Composer provider index' | :success
- 'PUBLIC' | :guest | true | false | 'Composer provider index' | :success
- 'PUBLIC' | :developer | false | true | 'Composer provider index' | :success
- 'PUBLIC' | :guest | false | true | 'Composer provider index' | :success
- 'PUBLIC' | :developer | false | false | 'Composer provider index' | :success
- 'PUBLIC' | :guest | false | false | 'Composer provider index' | :success
- 'PUBLIC' | :anonymous | false | true | 'Composer provider index' | :success
- 'PRIVATE' | :developer | true | true | 'Composer provider index' | :success
- 'PRIVATE' | :guest | true | true | 'Composer empty provider index' | :success
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
- end
+ context 'with valid project' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | true | false | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | false | true | 'Composer provider index' | :success
+ 'PUBLIC' | :developer | false | false | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | true | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | true | false | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | false | true | 'Composer provider index' | :success
+ 'PUBLIC' | :guest | false | false | 'Composer provider index' | :success
+ 'PUBLIC' | :anonymous | false | true | 'Composer provider index' | :success
+ 'PRIVATE' | :developer | true | true | 'Composer provider index' | :success
+ 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | true | true | 'Composer empty provider index' | :success
+ 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown group id'
end
+
+ it_behaves_like 'rejects Composer access with unknown group id'
end
describe 'GET /api/v4/group/:id/-/packages/composer/*package_name.json' do
@@ -103,48 +140,44 @@ RSpec.describe API::ComposerPackages do
subject { get api(url), headers: headers }
- context 'without the need for a license' do
- context 'with no packages' do
- include_context 'Composer user type', :developer, true do
- it_behaves_like 'returning response status', :not_found
- end
+ context 'with no packages' do
+ include_context 'Composer user type', :developer, true do
+ it_behaves_like 'returning response status', :not_found
end
+ end
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | true | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | true | false | 'Composer package api request' | :success
- 'PUBLIC' | :guest | true | false | 'Composer package api request' | :success
- 'PUBLIC' | :developer | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :guest | false | true | 'Composer package api request' | :success
- 'PUBLIC' | :developer | false | false | 'Composer package api request' | :success
- 'PUBLIC' | :guest | false | false | 'Composer package api request' | :success
- 'PUBLIC' | :anonymous | false | true | 'Composer package api request' | :success
- 'PRIVATE' | :developer | true | true | 'Composer package api request' | :success
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
- end
+ context 'with valid project' do
+ let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | true | false | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | false | true | 'Composer package api request' | :success
+ 'PUBLIC' | :developer | false | false | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | true | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | true | false | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | false | true | 'Composer package api request' | :success
+ 'PUBLIC' | :guest | false | false | 'Composer package api request' | :success
+ 'PUBLIC' | :anonymous | false | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | true | true | 'Composer package api request' | :success
+ 'PRIVATE' | :developer | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | true | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | true | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | false | 'process Composer api request' | :not_found
+ 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :not_found
+ end
- with_them do
- include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api group access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown group id'
end
+
+ it_behaves_like 'rejects Composer access with unknown group id'
end
describe 'POST /api/v4/projects/:id/packages/composer' do
@@ -158,44 +191,40 @@ RSpec.describe API::ComposerPackages do
subject { post api(url), headers: headers, params: params }
shared_examples 'composer package publish' do
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'Composer package creation' | :created
- 'PUBLIC' | :guest | true | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process Composer api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'Composer package creation' | :created
- 'PRIVATE' | :guest | true | true | 'process Composer api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :unauthorized
- end
+ context 'with valid project' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'Composer package creation' | :created
+ 'PUBLIC' | :developer | true | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | true | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :guest | true | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :guest | false | true | 'process Composer api request' | :forbidden
+ 'PUBLIC' | :guest | false | false | 'process Composer api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'Composer package creation' | :created
+ 'PRIVATE' | :developer | true | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | true | true | 'process Composer api request' | :forbidden
+ 'PRIVATE' | :guest | true | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :guest | false | true | 'process Composer api request' | :not_found
+ 'PRIVATE' | :guest | false | false | 'process Composer api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process Composer api request' | :unauthorized
+ end
- with_them do
- include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ with_them do
+ include_context 'Composer api project access', params[:project_visibility_level], params[:user_role], params[:user_token] do
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown project id'
end
+
+ it_behaves_like 'rejects Composer access with unknown project id'
end
context 'with no tag or branch params' do
- let(:headers) { build_basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :developer, :bad_request
end
@@ -209,7 +238,7 @@ RSpec.describe API::ComposerPackages do
context 'with a non existing tag' do
let(:params) { { tag: 'non-existing-tag' } }
- let(:headers) { build_basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :developer, :not_found
end
@@ -224,7 +253,7 @@ RSpec.describe API::ComposerPackages do
context 'with a non existing branch' do
let(:params) { { branch: 'non-existing-branch' } }
- let(:headers) { build_basic_auth_header(user.username, personal_access_token.token) }
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :developer, :not_found
end
@@ -238,65 +267,61 @@ RSpec.describe API::ComposerPackages do
subject { get api(url), headers: headers, params: params }
- context 'without the need for a license' do
- context 'with valid project' do
- let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
-
- context 'when the sha does not match the package name' do
- let(:sha) { '123' }
+ context 'with valid project' do
+ let!(:package) { create(:composer_package, :with_metadatum, name: package_name, project: project) }
- it_behaves_like 'process Composer api request', :anonymous, :not_found
- end
+ context 'when the sha does not match the package name' do
+ let(:sha) { '123' }
- context 'when the package name does not match the sha' do
- let(:branch) { project.repository.find_branch('master') }
- let(:sha) { branch.target }
- let(:url) { "/projects/#{project.id}/packages/composer/archives/unexisting-package-name.zip" }
+ it_behaves_like 'process Composer api request', :anonymous, :not_found
+ end
- it_behaves_like 'process Composer api request', :anonymous, :not_found
- end
+ context 'when the package name does not match the sha' do
+ let(:branch) { project.repository.find_branch('master') }
+ let(:sha) { branch.target }
+ let(:url) { "/projects/#{project.id}/packages/composer/archives/unexisting-package-name.zip" }
- context 'with a matching package name and sha' do
- let(:branch) { project.repository.find_branch('master') }
- let(:sha) { branch.target }
-
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :expected_status) do
- 'PUBLIC' | :developer | true | true | :success
- 'PUBLIC' | :guest | true | true | :success
- 'PUBLIC' | :developer | true | false | :success
- 'PUBLIC' | :guest | true | false | :success
- 'PUBLIC' | :developer | false | true | :success
- 'PUBLIC' | :guest | false | true | :success
- 'PUBLIC' | :developer | false | false | :success
- 'PUBLIC' | :guest | false | false | :success
- 'PUBLIC' | :anonymous | false | true | :success
- 'PRIVATE' | :developer | true | true | :success
- 'PRIVATE' | :guest | true | true | :success
- 'PRIVATE' | :developer | true | false | :success
- 'PRIVATE' | :guest | true | false | :success
- 'PRIVATE' | :developer | false | true | :success
- 'PRIVATE' | :guest | false | true | :success
- 'PRIVATE' | :developer | false | false | :success
- 'PRIVATE' | :guest | false | false | :success
- 'PRIVATE' | :anonymous | false | true | :success
- end
+ it_behaves_like 'process Composer api request', :anonymous, :not_found
+ end
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ context 'with a matching package name and sha' do
+ let(:branch) { project.repository.find_branch('master') }
+ let(:sha) { branch.target }
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :expected_status) do
+ 'PUBLIC' | :developer | true | true | :success
+ 'PUBLIC' | :developer | true | false | :success
+ 'PUBLIC' | :developer | false | true | :success
+ 'PUBLIC' | :developer | false | false | :success
+ 'PUBLIC' | :guest | true | true | :success
+ 'PUBLIC' | :guest | true | false | :success
+ 'PUBLIC' | :guest | false | true | :success
+ 'PUBLIC' | :guest | false | false | :success
+ 'PUBLIC' | :anonymous | false | true | :success
+ 'PRIVATE' | :developer | true | true | :success
+ 'PRIVATE' | :developer | true | false | :success
+ 'PRIVATE' | :developer | false | true | :success
+ 'PRIVATE' | :developer | false | false | :success
+ 'PRIVATE' | :guest | true | true | :success
+ 'PRIVATE' | :guest | true | false | :success
+ 'PRIVATE' | :guest | false | true | :success
+ 'PRIVATE' | :guest | false | false | :success
+ 'PRIVATE' | :anonymous | false | true | :success
+ end
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- it_behaves_like 'process Composer api request', params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like 'process Composer api request', params[:user_role], params[:expected_status], params[:member]
end
end
-
- it_behaves_like 'rejects Composer access with unknown project id'
end
+
+ it_behaves_like 'rejects Composer access with unknown project id'
end
end
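
The Composer hunks above lean on the rspec-parameterized gem: a `where` table declares the visibility/role matrix and `with_them` expands each row into its own example group. A minimal, self-contained sketch of that pattern follows; the access rule inside `subject` is an invented stand-in for the real Composer endpoint, not GitLab code.

require 'rspec-parameterized'

RSpec.describe 'visibility matrix' do
  using RSpec::Parameterized::TableSyntax

  where(:visibility, :role, :expected_status) do
    'PUBLIC'  | :developer | :success
    'PUBLIC'  | :anonymous | :success
    'PRIVATE' | :developer | :success
    'PRIVATE' | :anonymous | :not_found
  end

  with_them do
    # Hypothetical access rule standing in for the endpoint under test.
    subject { visibility == 'PUBLIC' || role != :anonymous ? :success : :not_found }

    it { is_expected.to eq(expected_status) }
  end
end

Each table row becomes `visibility`, `role`, and `expected_status` methods inside `with_them`, which is why the diff can pass `params[:user_role]` and `params[:expected_status]` straight into `it_behaves_like`.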
diff --git a/spec/requests/api/conan_packages_spec.rb b/spec/requests/api/conan_packages_spec.rb
index 1d88eaef79c..95798b060f1 100644
--- a/spec/requests/api/conan_packages_spec.rb
+++ b/spec/requests/api/conan_packages_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe API::ConanPackages do
include WorkhorseHelpers
+ include HttpBasicAuthHelpers
include PackagesManagerApiSpecHelpers
let(:package) { create(:conan_package) }
@@ -330,6 +331,18 @@ RSpec.describe API::ConanPackages do
.and_return(presenter)
end
+ shared_examples 'rejects invalid upload_url params' do
+ context 'with unaccepted json format' do
+ let(:params) { %w[foo bar] }
+
+ it 'returns 400' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
describe 'GET /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
let(:recipe_path) { package.conan_recipe_path }
@@ -417,13 +430,14 @@ RSpec.describe API::ConanPackages do
let(:recipe_path) { package.conan_recipe_path }
let(:params) do
- { "conanfile.py": 24,
- "conanmanifext.txt": 123 }
+ { 'conanfile.py': 24,
+ 'conanmanifest.txt': 123 }
end
- subject { post api("/packages/conan/v1/conans/#{recipe_path}/upload_urls"), params: params, headers: headers }
+ subject { post api("/packages/conan/v1/conans/#{recipe_path}/upload_urls"), params: params.to_json, headers: headers }
it_behaves_like 'rejects invalid recipe'
+ it_behaves_like 'rejects invalid upload_url params'
it 'returns a set of upload urls for the files requested' do
subject
@@ -435,20 +449,58 @@ RSpec.describe API::ConanPackages do
expect(response.body).to eq(expected_response.to_json)
end
+
+ context 'with conan_sources and conan_export files' do
+ let(:params) do
+ { 'conan_sources.tgz': 345,
+ 'conan_export.tgz': 234,
+ 'conanmanifest.txt': 123 }
+ end
+
+ it 'returns upload urls for the additional files' do
+ subject
+
+ expected_response = {
+ 'conan_sources.tgz': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conan_sources.tgz",
+ 'conan_export.tgz': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conan_export.tgz",
+ 'conanmanifest.txt': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
+ }
+
+ expect(response.body).to eq(expected_response.to_json)
+ end
+ end
+
+ context 'with an invalid file' do
+ let(:params) do
+ { 'invalid_file.txt': 10,
+ 'conanmanifest.txt': 123 }
+ end
+
+ it 'does not return the invalid file as an upload_url' do
+ subject
+
+ expected_response = {
+ 'conanmanifest.txt': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
+ }
+
+ expect(response.body).to eq(expected_response.to_json)
+ end
+ end
end
describe 'POST /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel/packages/:conan_package_reference/upload_urls' do
let(:recipe_path) { package.conan_recipe_path }
let(:params) do
- { "conaninfo.txt": 24,
- "conanmanifext.txt": 123,
- "conan_package.tgz": 523 }
+ { 'conaninfo.txt': 24,
+ 'conanmanifest.txt': 123,
+ 'conan_package.tgz': 523 }
end
- subject { post api("/packages/conan/v1/conans/#{recipe_path}/packages/123456789/upload_urls"), params: params, headers: headers }
+ subject { post api("/packages/conan/v1/conans/#{recipe_path}/packages/123456789/upload_urls"), params: params.to_json, headers: headers }
it_behaves_like 'rejects invalid recipe'
+ it_behaves_like 'rejects invalid upload_url params'
it 'returns a set of upload urls for the files requested' do
expected_response = {
@@ -461,6 +513,23 @@ RSpec.describe API::ConanPackages do
expect(response.body).to eq(expected_response.to_json)
end
+
+ context 'with invalid files' do
+ let(:params) do
+ { 'conaninfo.txt': 24,
+ 'invalid_file.txt': 10 }
+ end
+
+ it 'returns upload urls only for the valid requested files' do
+ expected_response = {
+ 'conaninfo.txt': "#{Settings.gitlab.base_url}/api/v4/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conaninfo.txt"
+ }
+
+ subject
+
+ expect(response.body).to eq(expected_response.to_json)
+ end
+ end
end
describe 'DELETE /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
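
The new `rejects invalid upload_url params` shared example works because `subject` is defined once in the outer `describe` and the shared example only swaps the `params` it feeds into it. A stripped-down sketch of that composition, using a hypothetical validator in place of the real upload_urls endpoint (the real specs post `params.to_json` to the API):

RSpec.shared_examples 'rejects invalid payloads' do
  context 'with a JSON array instead of an object' do
    let(:params) { %w[foo bar] }

    it 'returns 400' do
      expect(subject).to eq(:bad_request)
    end
  end
end

RSpec.describe 'upload_urls' do
  # Hypothetical stand-in for POSTing the params to the API.
  def post_upload_urls(params)
    params.is_a?(Hash) ? :ok : :bad_request
  end

  let(:params) { { 'conanfile.py': 24, 'conanmanifest.txt': 123 } }

  subject { post_upload_urls(params) }

  it_behaves_like 'rejects invalid payloads'

  it 'accepts a well-formed hash' do
    expect(subject).to eq(:ok)
  end
end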
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 81cef653770..591d994fec9 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -165,6 +165,7 @@ RSpec.describe API::DeployKeys do
let(:deploy_keys_project) do
create(:deploy_keys_project, project: project, deploy_key: deploy_key)
end
+
let(:extra_params) { { title: 'new title', can_push: true } }
it 'updates the title of the deploy key' do
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index b50f63ed67c..d7571ad4bff 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -10,12 +10,14 @@ RSpec.describe API::Files do
let(:guest) { create(:user) { |u| project.add_guest(u) } }
let(:file_path) { "files%2Fruby%2Fpopen%2Erb" }
let(:rouge_file_path) { "%2e%2e%2f" }
+ let(:absolute_path) { "%2Fetc%2Fpasswd.rb" }
let(:invalid_file_message) { 'file_path should be a valid file path' }
let(:params) do
{
ref: 'master'
}
end
+
let(:author_email) { 'user@example.org' }
let(:author_name) { 'John Doe' }
@@ -57,6 +59,18 @@ RSpec.describe API::Files do
end
end
+ shared_examples 'when path is absolute' do
+ it 'returns 400 when file path is absolute' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ if response.body.present?
+ expect(json_response['error']).to eq(invalid_file_message)
+ end
+ end
+ end
+
describe "HEAD /projects/:id/repository/files/:file_path" do
shared_examples_for 'repository files' do
it 'returns 400 when file path is invalid' do
@@ -65,6 +79,10 @@ RSpec.describe API::Files do
expect(response).to have_gitlab_http_status(:bad_request)
end
+ it_behaves_like 'when path is absolute' do
+ subject { head api(route(absolute_path), current_user), params: params }
+ end
+
it 'returns file attributes in headers' do
head api(route(file_path), current_user), params: params
@@ -165,6 +183,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { get api(route(absolute_path), api_user), params: params }
+ end
+
it 'returns file attributes as json' do
get api(route(file_path), api_user), params: params
@@ -350,6 +372,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { get api(route(absolute_path) + '/blame', current_user), params: params }
+ end
+
it 'returns blame file attributes as json' do
get api(route(file_path) + '/blame', current_user), params: params
@@ -473,6 +499,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { get api(route(absolute_path) + '/raw', current_user), params: params }
+ end
+
it 'returns raw file info' do
url = route(file_path) + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
@@ -597,6 +627,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { post api(route(absolute_path), user), params: params }
+ end
+
it "creates a new file in project repo" do
post api(route(file_path), user), params: params
@@ -735,6 +769,17 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ let(:last_commit) do
+ Gitlab::Git::Commit
+ .last_for_path(project.repository, 'master', URI.unescape(file_path))
+ end
+
+ let(:params_with_correct_id) { params.merge(last_commit_id: last_commit.id) }
+
+ subject { put api(route(absolute_path), user), params: params_with_correct_id }
+ end
+
it "returns a 400 bad request if no params given" do
put api(route(file_path), user)
@@ -770,6 +815,10 @@ RSpec.describe API::Files do
expect(json_response['error']).to eq(invalid_file_message)
end
+ it_behaves_like 'when path is absolute' do
+ subject { delete api(route(absolute_path), user), params: params }
+ end
+
it "deletes existing file in project repo" do
delete api(route(file_path), user), params: params
@@ -821,6 +870,7 @@ RSpec.describe API::Files do
encoding: 'base64'
}
end
+
let(:get_params) do
{
ref: 'master'
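
The `when path is absolute` shared example above is reused across HEAD, GET, POST, PUT, and DELETE by passing a block to `it_behaves_like` that redefines `subject` for each verb. A minimal sketch of that override mechanism; the `request` helper and the unescaped path are invented for illustration, whereas the real specs hit `api(route(absolute_path), ...)` with a URL-encoded path:

RSpec.shared_examples 'when path is absolute' do
  it 'returns 400 when file path is absolute' do
    expect(subject).to eq(:bad_request)
  end
end

RSpec.describe 'a files API' do
  # Hypothetical endpoint stub: absolute paths are rejected regardless of verb.
  def request(_verb, path)
    path.start_with?('/') ? :bad_request : :ok
  end

  it_behaves_like 'when path is absolute' do
    subject { request(:head, '/etc/passwd.rb') }
  end

  it_behaves_like 'when path is absolute' do
    subject { request(:delete, '/etc/passwd.rb') }
  end
end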
diff --git a/spec/requests/api/go_proxy_spec.rb b/spec/requests/api/go_proxy_spec.rb
index 91e455dac19..2d7e319b0be 100644
--- a/spec/requests/api/go_proxy_spec.rb
+++ b/spec/requests/api/go_proxy_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::GoProxy do
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create :user }
let_it_be(:project) { create :project_empty_repo, creator: user, path: 'my-go-lib' }
@@ -108,6 +109,7 @@ RSpec.describe API::GoProxy do
project.repository.commit_by(oid: sha)
end
+
let(:resource) { "#{version}.info" }
it_behaves_like 'an unavailable resource'
@@ -386,7 +388,7 @@ RSpec.describe API::GoProxy do
end
it 'returns ok with a personal access token and basic authentication' do
- get_resource(headers: build_basic_auth_header(user.username, pa_token.token))
+ get_resource(headers: basic_auth_header(user.username, pa_token.token))
expect(response).to have_gitlab_http_status(:ok)
end
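
Several of these hunks swap `build_basic_auth_header` for the `basic_auth_header` helper pulled in via `HttpBasicAuthHelpers`. The helper's implementation is not shown in the diff; the sketch below is an assumption made only to show the header shape, not GitLab's code:

require 'base64'

def basic_auth_header(username, password)
  # RFC 7617: "Basic " followed by base64("username:password")
  { 'Authorization' => "Basic #{Base64.strict_encode64("#{username}:#{password}")}" }
end

basic_auth_header('alice', 'personal-access-token')
# => { "Authorization" => "Basic YWxpY2U6..." }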
diff --git a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
new file mode 100644
index 00000000000..ae1abb50a40
--- /dev/null
+++ b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'get board lists' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauth_user) { create(:user) }
+ let_it_be(:project) { create(:project, creator_id: user.id, namespace: user.namespace) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project_label) { create(:label, project: project, name: 'Development') }
+ let_it_be(:project_label2) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:group_label) { create(:group_label, group: group, name: 'Development') }
+ let_it_be(:group_label2) { create(:group_label, group: group, name: 'Testing') }
+
+ let(:params) { '' }
+ let(:board) { }
+ let(:board_parent_type) { board_parent.class.to_s.downcase }
+ let(:board_data) { graphql_data[board_parent_type]['boards']['nodes'][0] }
+ let(:lists_data) { board_data['lists']['nodes'][0] }
+ let(:issues_data) { lists_data['issues']['nodes'] }
+
+ def query(list_params = params)
+ graphql_query_for(
+ board_parent_type,
+ { 'fullPath' => board_parent.full_path },
+ <<~BOARDS
+ boards(first: 1) {
+ nodes {
+ lists {
+ nodes {
+ issues {
+ count
+ nodes {
+ #{all_graphql_fields_for('issues'.classify)}
+ }
+ }
+ }
+ }
+ }
+ }
+ BOARDS
+ )
+ end
+
+ def issue_titles
+ issues_data.map { |i| i['title'] }
+ end
+
+ shared_examples 'group and project board list issues query' do
+ let!(:board) { create(:board, resource_parent: board_parent) }
+ let!(:label_list) { create(:list, board: board, label: label, position: 10) }
+ let!(:issue1) { create(:issue, project: issue_project, labels: [label], relative_position: 9) }
+ let!(:issue2) { create(:issue, project: issue_project, labels: [label], relative_position: 2) }
+ let!(:issue3) { create(:issue, project: issue_project, labels: [label], relative_position: 9) }
+ let!(:issue4) { create(:issue, project: issue_project, labels: [label2], relative_position: 432) }
+
+ context 'when the user does not have access to the board' do
+ it 'returns nil' do
+ post_graphql(query, current_user: unauth_user)
+
+ expect(graphql_data[board_parent_type]).to be_nil
+ end
+ end
+
+ context 'when user can read the board' do
+ before do
+ board_parent.add_reporter(user)
+ end
+
+ it 'can access the issues' do
+ post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
+
+ expect(issue_titles).to eq([issue2.title, issue3.title, issue1.title])
+ end
+ end
+ end
+
+ describe 'for a project' do
+ let(:board_parent) { project }
+ let(:label) { project_label }
+ let(:label2) { project_label2 }
+ let(:issue_project) { project }
+
+ it_behaves_like 'group and project board list issues query'
+ end
+
+ describe 'for a group' do
+ let(:board_parent) { group }
+ let(:label) { group_label }
+ let(:label2) { group_label2 }
+ let(:issue_project) { create(:project, :private, group: group) }
+
+ before do
+ allow(board_parent).to receive(:multiple_issue_boards_available?).and_return(false)
+ end
+
+ it_behaves_like 'group and project board list issues query'
+ end
+end
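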
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index 8a89590c85a..0838900eaba 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -105,6 +105,20 @@ RSpec.describe 'get board lists' do
end
end
end
+
+ context 'when querying for a single list' do
+ before do
+ board_parent.add_reporter(user)
+ end
+
+ it 'finds the correct list' do
+ label_list = create(:list, board: board, label: label, position: 10)
+
+ post_graphql(query("id: \"#{global_id_of(label_list)}\""), current_user: user)
+
+ expect(lists_data[0]['node']['title']).to eq label_list.title
+ end
+ end
end
describe 'for a project' do
diff --git a/spec/requests/api/graphql/ci/groups_spec.rb b/spec/requests/api/graphql/ci/groups_spec.rb
new file mode 100644
index 00000000000..9e81358a152
--- /dev/null
+++ b/spec/requests/api/graphql/ci/groups_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipeline.stages.groups' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let(:group_graphql_data) { graphql_data.dig('project', 'pipeline', 'stages', 'nodes', 0, 'groups', 'nodes') }
+
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('CiGroup')}
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ nodes {
+ groups {
+ #{fields}
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ create(:commit_status, pipeline: pipeline, name: 'rspec 0 2')
+ create(:commit_status, pipeline: pipeline, name: 'rspec 0 1')
+ create(:commit_status, pipeline: pipeline, name: 'spinach 0 1')
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns an array of jobs belonging to a pipeline' do
+ expect(group_graphql_data.map { |g| g.slice('name', 'size') }).to eq([
+ { 'name' => 'rspec', 'size' => 2 },
+ { 'name' => 'spinach', 'size' => 1 }
+ ])
+ end
+end
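
The expected payload in this new spec — `rspec` with size 2 and `spinach` with size 1 — comes from GitLab collapsing parallel job names such as `rspec 0 1` and `rspec 0 2` into a single group. The snippet below is a rough illustration of that grouping, not the real `CiGroup` implementation:

jobs = ['rspec 0 2', 'rspec 0 1', 'spinach 0 1']

groups = jobs
  .group_by { |name| name[/\A\S+/] } # group by the leading word of the job name
  .map { |name, members| { 'name' => name, 'size' => members.size } }

groups
# => [{ "name" => "rspec", "size" => 2 }, { "name" => "spinach", "size" => 1 }]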
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
new file mode 100644
index 00000000000..7d416f4720b
--- /dev/null
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipeline.stages.groups.jobs' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:user) }
+ let(:pipeline) do
+ pipeline = create(:ci_pipeline, project: project, user: user)
+ stage = create(:ci_stage_entity, pipeline: pipeline, name: 'first')
+ create(:commit_status, stage_id: stage.id, pipeline: pipeline, name: 'my test job')
+
+ pipeline
+ end
+
+ def first(field)
+ [field.pluralize, 'nodes', 0]
+ end
+
+ let(:jobs_graphql_data) { graphql_data.dig(*%w[project pipeline], *first('stage'), *first('group'), 'jobs', 'nodes') }
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ nodes {
+ name
+ groups {
+ nodes {
+ name
+ jobs {
+ nodes {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ )
+ end
+
+ it 'returns the jobs of a pipeline stage' do
+ post_graphql(query, current_user: user)
+
+ expect(jobs_graphql_data).to contain_exactly(a_hash_including('name' => 'my test job'))
+ end
+
+ context 'when fetching jobs from the pipeline' do
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: user)
+ end
+
+ build_stage = create(:ci_stage_entity, name: 'build', pipeline: pipeline)
+ test_stage = create(:ci_stage_entity, name: 'test', pipeline: pipeline)
+ create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 1 2')
+ create(:commit_status, pipeline: pipeline, stage_id: build_stage.id, name: 'docker 2 2')
+ create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 1 2')
+ create(:commit_status, pipeline: pipeline, stage_id: test_stage.id, name: 'rspec 2 2')
+
+ expect do
+ post_graphql(query, current_user: user)
+ end.not_to exceed_query_limit(control_count)
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ build_stage = graphql_data.dig('project', 'pipeline', 'stages', 'nodes').find do |stage|
+ stage['name'] == 'build'
+ end
+ test_stage = graphql_data.dig('project', 'pipeline', 'stages', 'nodes').find do |stage|
+ stage['name'] == 'test'
+ end
+ docker_group = build_stage.dig('groups', 'nodes').first
+ rspec_group = test_stage.dig('groups', 'nodes').first
+
+ expect(docker_group['name']).to eq('docker')
+ expect(rspec_group['name']).to eq('rspec')
+
+ docker_jobs = docker_group.dig('jobs', 'nodes')
+ rspec_jobs = rspec_group.dig('jobs', 'nodes')
+
+ expect(docker_jobs).to eq([{ 'name' => 'docker 1 2' }, { 'name' => 'docker 2 2' }])
+ expect(rspec_jobs).to eq([{ 'name' => 'rspec 1 2' }, { 'name' => 'rspec 2 2' }])
+ end
+ end
+end
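
The N+1 check above relies on GitLab's `ActiveRecord::QueryRecorder` and the `exceed_query_limit` matcher, neither of which appears in this diff. The sketch below is a hypothetical stand-in that counts SQL statements via Active Support instrumentation, shown only to illustrate the control-count comparison:

require 'active_support/notifications'

def count_queries
  count = 0
  counter = lambda do |_name, _started, _finished, _id, payload|
    count += 1 unless payload[:name] == 'SCHEMA'
  end
  ActiveSupport::Notifications.subscribed(counter, 'sql.active_record') { yield }
  count
end

# Usage pattern mirrored from the spec (names are illustrative):
# control = count_queries { post_graphql(query, current_user: user) }
# create_more_jobs
# expect(count_queries { post_graphql(query, current_user: user) }).to be <= control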
diff --git a/spec/requests/api/graphql/ci/stages_spec.rb b/spec/requests/api/graphql/ci/stages_spec.rb
new file mode 100644
index 00000000000..cd48a24b9c8
--- /dev/null
+++ b/spec/requests/api/graphql/ci/stages_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Query.project.pipeline.stages' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository, :public) }
+ let(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let(:stage_graphql_data) { graphql_data['project']['pipeline']['stages'] }
+
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ #{all_graphql_fields_for('CiStage')}
+ }
+ QUERY
+ end
+
+ let(:query) do
+ %(
+ query {
+ project(fullPath: "#{project.full_path}") {
+ pipeline(iid: "#{pipeline.iid}") {
+ stages {
+ #{fields}
+ }
+ }
+ }
+ }
+ )
+ end
+
+ before do
+ create(:ci_stage_entity, pipeline: pipeline, name: 'deploy')
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns the stage of a pipeline' do
+ expect(stage_graphql_data['nodes'].first['name']).to eq('deploy')
+ end
+end
diff --git a/spec/requests/api/graphql/issue_status_counts_spec.rb b/spec/requests/api/graphql/issue_status_counts_spec.rb
new file mode 100644
index 00000000000..3d8817c3bc5
--- /dev/null
+++ b/spec/requests/api/graphql/issue_status_counts_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'getting Issue counts by status' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:issue_opened) { create(:issue, project: project) }
+ let_it_be(:issue_closed) { create(:issue, :closed, project: project) }
+ let_it_be(:other_project_issue) { create(:issue) }
+ let(:params) { {} }
+
+ let(:fields) do
+ <<~QUERY
+ #{all_graphql_fields_for('IssueStatusCountsType'.classify)}
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('issueStatusCounts', params, fields)
+ )
+ end
+
+ context 'with issue count data' do
+ let(:issue_counts) { graphql_data.dig('project', 'issueStatusCounts') }
+
+ context 'without project permissions' do
+ let(:user) { create(:user) }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+ it { expect(issue_counts).to be nil }
+ end
+
+ context 'with project permissions' do
+ before do
+ project.add_developer(current_user)
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+ it 'returns the correct counts for each status' do
+ expect(issue_counts).to eq(
+ 'all' => 2,
+ 'opened' => 1,
+ 'closed' => 1
+ )
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
index c47920087dc..ca5a9165760 100644
--- a/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
+++ b/spec/requests/api/graphql/metrics/dashboard/annotations_spec.rb
@@ -30,22 +30,22 @@ RSpec.describe 'Getting Metrics Dashboard Annotations' do
let(:query) do
%(
- query {
- project(fullPath:"#{project.full_path}") {
- environments(name: "#{environment.name}") {
- nodes {
- metricsDashboard(path: "#{path}"){
- annotations(#{args}){
- nodes {
- #{fields}
- }
- }
+ query {
+ project(fullPath: "#{project.full_path}") {
+ environments(name: "#{environment.name}") {
+ nodes {
+ metricsDashboard(path: "#{path}") {
+ annotations(#{args}) {
+ nodes {
+ #{fields}
}
}
}
}
}
- )
+ }
+ }
+ )
end
before do
diff --git a/spec/requests/api/graphql/milestone_spec.rb b/spec/requests/api/graphql/milestone_spec.rb
new file mode 100644
index 00000000000..59de116fa2b
--- /dev/null
+++ b/spec/requests/api/graphql/milestone_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Querying a Milestone' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
+ let(:query) do
+ graphql_query_for('milestone', { id: milestone.to_global_id.to_s }, 'title')
+ end
+
+ subject { graphql_data['milestone'] }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'when the user has access to the milestone' do
+ before_all do
+ project.add_guest(current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to include('title' => milestone.name) }
+ end
+
+ context 'when the user does not have access to the milestone' do
+ it_behaves_like 'a working graphql query'
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when ID argument is missing' do
+ let(:query) do
+ graphql_query_for('milestone', {}, 'title')
+ end
+
+ it 'raises an exception' do
+ expect(graphql_errors).to include(a_hash_including('message' => "Field 'milestone' is missing required arguments: id"))
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
new file mode 100644
index 00000000000..e24ab0b07f2
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Reposition and move issue within board lists' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:testing) { create(:label, project: project, name: 'Testing') }
+ let_it_be(:list1) { create(:list, board: board, label: development, position: 0) }
+ let_it_be(:list2) { create(:list, board: board, label: testing, position: 1) }
+ let_it_be(:existing_issue1) { create(:labeled_issue, project: project, labels: [testing], relative_position: 10) }
+ let_it_be(:existing_issue2) { create(:labeled_issue, project: project, labels: [testing], relative_position: 50) }
+ let_it_be(:issue1) { create(:labeled_issue, project: project, labels: [development]) }
+
+ let(:mutation_class) { Mutations::Boards::Issues::IssueMoveList }
+ let(:mutation_name) { mutation_class.graphql_name }
+ let(:mutation_result_identifier) { mutation_name.camelize(:lower) }
+ let(:current_user) { user }
+ let(:params) { { board_id: board.to_global_id.to_s, project_path: project.full_path, iid: issue1.iid.to_s } }
+ let(:issue_move_params) do
+ {
+ from_list_id: list1.id,
+ to_list_id: list2.id
+ }
+ end
+
+ before_all do
+ group.add_maintainer(user)
+ end
+
+ shared_examples 'returns an error' do
+ it 'fails with error' do
+ message = "The resource that you are attempting to access does not exist or you don't have "\
+ "permission to perform this action"
+
+ post_graphql_mutation(mutation(params), current_user: current_user)
+
+ expect(graphql_errors).to include(a_hash_including('message' => message))
+ end
+ end
+
+ context 'when user has access to resources' do
+ context 'when repositioning an issue' do
+ let(:issue_move_params) { { move_after_id: existing_issue1.id, move_before_id: existing_issue2.id } }
+
+ it 'repositions an issue' do
+ post_graphql_mutation(mutation(params), current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ response_issue = json_response['data'][mutation_result_identifier]['issue']
+ expect(response_issue['iid']).to eq(issue1.iid.to_s)
+ expect(response_issue['relativePosition']).to be > existing_issue1.relative_position
+ expect(response_issue['relativePosition']).to be < existing_issue2.relative_position
+ end
+ end
+
+ context 'when moving an issue to a different list' do
+ let(:issue_move_params) { { from_list_id: list1.id, to_list_id: list2.id } }
+
+ it 'moves issue to a different list' do
+ post_graphql_mutation(mutation(params), current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ response_issue = json_response['data'][mutation_result_identifier]['issue']
+ expect(response_issue['iid']).to eq(issue1.iid.to_s)
+ expect(response_issue['labels']['edges'][0]['node']['title']).to eq(testing.title)
+ end
+ end
+ end
+
+ context 'when user has no access to resources' do
+ context 'the user is not allowed to update the issue' do
+ let(:current_user) { create(:user) }
+
+ it_behaves_like 'returns an error'
+ end
+
+ context 'when the user can not read board' do
+ let(:board) { create(:board, group: create(:group, :private)) }
+
+ it_behaves_like 'returns an error'
+ end
+ end
+
+ def mutation(additional_params = {})
+ graphql_mutation(mutation_name, issue_move_params.merge(additional_params),
+ <<-QL.strip_heredoc
+ clientMutationId
+ issue {
+ iid,
+ relativePosition
+ labels {
+ edges {
+ node{
+ title
+ }
+ }
+ }
+ }
+ errors
+ QL
+ )
+ end
+end
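
The repositioning assertion in this spec only checks an invariant: after moving between `existing_issue1` and `existing_issue2`, the issue's `relativePosition` must fall strictly between its neighbours' positions. A toy illustration of that invariant (not GitLab's actual positioning algorithm, which leaves larger gaps):

def position_between(before_pos, after_pos)
  (before_pos + after_pos) / 2 # integer midpoint, purely for illustration
end

new_position = position_between(10, 50)
raise 'invariant violated' unless new_position > 10 && new_position < 50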
diff --git a/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb b/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
new file mode 100644
index 00000000000..8a6d2cb3994
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/boards/lists/update_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update of an existing board list' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:board) { create(:board, group: group) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
+ let_it_be(:list2) { create(:list, board: board) }
+ let_it_be(:input) { { list_id: list.to_global_id.to_s, position: 1, collapsed: true } }
+ let(:mutation) { graphql_mutation(:update_board_list, input) }
+ let(:mutation_response) { graphql_mutation_response(:update_board_list) }
+
+ context 'the user is not allowed to read board lists' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+ end
+
+ before do
+ list.update_preferences_for(current_user, collapsed: false)
+ end
+
+ context 'when user has permissions to admin board lists' do
+ before do
+ group.add_reporter(current_user)
+ end
+
+ it 'updates the list position and collapsed state' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['list']).to include(
+ 'position' => 1,
+ 'collapsed' => true
+ )
+ end
+ end
+
+ context 'when user has permissions to read board lists' do
+ before do
+ group.add_guest(current_user)
+ end
+
+ it 'updates the list collapsed state but not the list position' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['list']).to include(
+ 'position' => 0,
+ 'collapsed' => true
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb b/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
index bc1b42d68e6..7bef812bfec 100644
--- a/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/container_expiration_policy/update_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe 'Updating the container expiration policy' do
older_than: 'FOURTEEN_DAYS'
}
end
+
let(:mutation) do
graphql_mutation(:update_container_expiration_policy, params,
<<~QL
@@ -32,6 +33,7 @@ RSpec.describe 'Updating the container expiration policy' do
QL
)
end
+
let(:mutation_response) { graphql_mutation_response(:update_container_expiration_policy) }
let(:container_expiration_policy_response) { mutation_response['containerExpirationPolicy'] }
diff --git a/spec/requests/api/graphql/mutations/design_management/move_spec.rb b/spec/requests/api/graphql/mutations/design_management/move_spec.rb
new file mode 100644
index 00000000000..dd121ec733e
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/design_management/move_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe "moving designs" do
+ include GraphqlHelpers
+ include DesignManagementTestHelpers
+
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:designs) { create_list(:design, 3, :with_versions, :with_relative_position, issue: issue) }
+ let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
+
+ let(:user) { developer }
+
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+ let(:mutation_name) { :design_management_move }
+
+ let(:mutation) do
+ input = {
+ id: current_design.to_global_id.to_s,
+ previous: previous_design&.to_global_id&.to_s,
+ next: next_design&.to_global_id&.to_s
+ }.compact
+
+ graphql_mutation(mutation_name, input, <<~FIELDS)
+ errors
+ designCollection {
+ designs {
+ nodes {
+ filename
+ }
+ }
+ }
+ FIELDS
+ end
+
+ let(:move_designs) { post_graphql_mutation(mutation, current_user: user) }
+ let(:mutation_response) { graphql_mutation_response(mutation_name) }
+
+ before do
+ enable_design_management
+ designs.each(&:reset)
+ issue.reset
+ end
+
+ shared_examples 'a successful move' do
+ it 'does not error, and reports the current order' do
+ move_designs
+
+ expect(graphql_errors).not_to be_present
+
+ expect(mutation_response).to eq(
+ 'errors' => [],
+ 'designCollection' => {
+ 'designs' => {
+ 'nodes' => new_order.map { |d| { 'filename' => d.filename } }
+ }
+ }
+ )
+ end
+ end
+
+ context 'the user is not allowed to move designs' do
+ let(:user) { create(:user) }
+
+ it 'returns an error' do
+ move_designs
+
+ expect(graphql_errors).to be_present
+ end
+ end
+
+ context 'the neighbors do not have positions' do
+ let!(:previous_design) { create(:design, :with_versions, issue: issue) }
+ let!(:next_design) { create(:design, :with_versions, issue: issue) }
+
+ let(:new_order) do
+ [
+ designs.second,
+ designs.third,
+ previous_design, current_design, next_design
+ ]
+ end
+
+ it_behaves_like 'a successful move'
+
+ it 'maintains the correct order in the presence of other unpositioned designs' do
+ other_design = create(:design, :with_versions, issue: issue)
+
+ move_designs
+ moved_designs = mutation_response.dig('designCollection', 'designs', 'nodes')
+
+ expect(moved_designs.map { |d| d['filename'] })
+ .to eq([*new_order.map(&:filename), other_design.filename])
+ end
+ end
+
+ context 'moving a design between two others' do
+ let(:new_order) { [designs.second, designs.first, designs.third] }
+
+ it_behaves_like 'a successful move'
+ end
+
+ context 'moving a design to the start' do
+ let(:current_design) { designs.last }
+ let(:next_design) { designs.first }
+ let(:previous_design) { nil }
+ let(:new_order) { [designs.last, designs.first, designs.second] }
+
+ it_behaves_like 'a successful move'
+ end
+
+ context 'moving a design to the end' do
+ let(:current_design) { designs.first }
+ let(:next_design) { nil }
+ let(:previous_design) { designs.last }
+ let(:new_order) { [designs.second, designs.third, designs.first] }
+
+ it_behaves_like 'a successful move'
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb b/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
index e83da830935..457c37e900b 100644
--- a/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
+++ b/spec/requests/api/graphql/mutations/discussions/toggle_resolve_spec.rb
@@ -10,9 +10,11 @@ RSpec.describe 'Toggling the resolve status of a discussion' do
let(:discussion) do
create(:diff_note_on_merge_request, noteable: noteable, project: project).to_discussion
end
+
let(:mutation) do
graphql_mutation(:discussion_toggle_resolve, { id: discussion.to_global_id.to_s, resolve: true })
end
+
let(:mutation_response) { graphql_mutation_response(:discussion_toggle_resolve) }
context 'when the user does not have permission' do
diff --git a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
index 3dd1225db5a..b3c9b9d4995 100644
--- a/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/requests/api/graphql/mutations/issues/set_due_date_spec.rb
@@ -49,13 +49,13 @@ RSpec.describe 'Setting Due Date of an issue' do
expect(mutation_response['issue']['dueDate']).to eq(2.days.since.to_date.to_s)
end
- context 'when passing due date without a date value' do
+ context 'when the due date value is not a valid time' do
let(:input) { { due_date: 'test' } }
- it 'returns internal server error' do
+ it 'returns a coercion error' do
post_graphql_mutation(mutation, current_user: current_user)
- expect(graphql_errors).to include(a_hash_including('message' => 'Internal server error'))
+ expect(graphql_errors).to include(a_hash_including('message' => /provided invalid value for dueDate/))
end
end
end
diff --git a/spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb b/spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb
new file mode 100644
index 00000000000..1edc1e0553b
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/set_subscription_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Setting subscribed status of an issue' do
+ include GraphqlHelpers
+
+ it_behaves_like 'a subscribable resource api' do
+ let_it_be(:resource) { create(:issue) }
+ let(:mutation_name) { :issue_set_subscription }
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/issues/update_spec.rb b/spec/requests/api/graphql/mutations/issues/update_spec.rb
new file mode 100644
index 00000000000..fd983c683be
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/issues/update_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Update of an existing issue' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let(:input) do
+ {
+ project_path: project.full_path,
+ iid: issue.iid.to_s,
+ locked: true
+ }
+ end
+
+ let(:mutation) { graphql_mutation(:update_issue, input) }
+ let(:mutation_response) { graphql_mutation_response(:update_issue) }
+
+ context 'the user is not allowed to update issue' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ['The resource that you are attempting to access does not exist or you don\'t have permission to perform this action']
+ end
+
+ context 'when user has permissions to update issue' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'updates the issue' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['issue']).to include(
+ 'discussionLocked' => true
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
index d4ac639e226..9297ca054c7 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'Creation of a new merge request' do
target_branch: target_branch
}
end
+
let(:title) { 'MergeRequest' }
let(:source_branch) { 'new_branch' }
let(:target_branch) { 'master' }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
index 6b3035fbf48..d90faa605c0 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -5,59 +5,8 @@ require 'spec_helper'
RSpec.describe 'Setting subscribed status of a merge request' do
include GraphqlHelpers
- let(:current_user) { create(:user) }
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.project }
- let(:input) { { subscribed_state: true } }
-
- let(:mutation) do
- variables = {
- project_path: project.full_path,
- iid: merge_request.iid.to_s
- }
- graphql_mutation(:merge_request_set_subscription, variables.merge(input),
- <<-QL.strip_heredoc
- clientMutationId
- errors
- mergeRequest {
- id
- subscribed
- }
- QL
- )
- end
-
- def mutation_response
- graphql_mutation_response(:merge_request_set_subscription)['mergeRequest']['subscribed']
- end
-
- before do
- project.add_developer(current_user)
- end
-
- it 'returns an error if the user is not allowed to update the merge request' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- end
-
- it 'marks the merge request as WIP' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response).to eq(true)
- end
-
- context 'when passing subscribe false as input' do
- let(:input) { { subscribed_state: false } }
-
- it 'unmarks the merge request as subscribed' do
- merge_request.subscribe(current_user, project)
-
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response).to eq(false)
- end
+ it_behaves_like 'a subscribable resource api' do
+ let_it_be(:resource) { create(:merge_request) }
+ let(:mutation_name) { :merge_request_set_subscription }
end
end
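
Both the issue and merge request subscription specs now delegate to the `a subscribable resource api` shared example and feed in their differences through `let_it_be(:resource)` and `let(:mutation_name)`. The shared example's body lives elsewhere in the suite; the sketch below only demonstrates the let-injection pattern, with an invented body and plain `let` in place of `let_it_be`:

RSpec.shared_examples 'a subscribable resource api' do
  # `resource` and `mutation_name` must be provided by the including group.
  it 'wires the mutation name to the resource' do
    expect(mutation_name.to_s).to end_with('_set_subscription')
    expect(resource).not_to be_nil
  end
end

RSpec.describe 'Setting subscribed status of a merge request' do
  it_behaves_like 'a subscribable resource api' do
    let(:resource) { Struct.new(:id).new(1) } # stand-in for create(:merge_request)
    let(:mutation_name) { :merge_request_set_subscription }
  end
end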
diff --git a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
index f7be671e5f3..463a872d95d 100644
--- a/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/image_diff_note_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe 'Updating an image DiffNote' do
y: updated_y
}
end
+
let!(:diff_note) do
create(:image_diff_note_on_merge_request,
noteable: noteable,
@@ -40,6 +41,7 @@ RSpec.describe 'Updating an image DiffNote' do
note: original_body,
position: original_position)
end
+
let(:mutation) do
variables = {
id: GitlabSchema.id_from_object(diff_note).to_s,
diff --git a/spec/requests/api/graphql/mutations/notes/update/note_spec.rb b/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
index 38378310d9f..0d93afe9434 100644
--- a/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/update/note_spec.rb
@@ -8,11 +8,9 @@ RSpec.describe 'Updating a Note' do
let!(:note) { create(:note, note: original_body) }
let(:original_body) { 'Initial body text' }
let(:updated_body) { 'Updated body text' }
+ let(:params) { { body: updated_body, confidential: true } }
let(:mutation) do
- variables = {
- id: GitlabSchema.id_from_object(note).to_s,
- body: updated_body
- }
+ variables = params.merge(id: GitlabSchema.id_from_object(note).to_s)
graphql_mutation(:update_note, variables)
end
@@ -31,6 +29,7 @@ RSpec.describe 'Updating a Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(note.reload.note).to eq(original_body)
+ expect(note.confidential).to be_falsey
end
end
@@ -43,12 +42,40 @@ RSpec.describe 'Updating a Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(note.reload.note).to eq(updated_body)
+ expect(note.confidential).to be_truthy
end
it 'returns the updated Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['note']['body']).to eq(updated_body)
+ expect(mutation_response['note']['confidential']).to be_truthy
+ end
+
+ context 'when only confidential param is present' do
+ let(:params) { { confidential: true } }
+
+ it 'updates only the note confidentiality' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(original_body)
+ expect(note.confidential).to be_truthy
+ end
+ end
+
+ context 'when only body param is present' do
+ let(:params) { { body: updated_body } }
+
+ before do
+ note.update_column(:confidential, true)
+ end
+
+ it 'updates only the note body' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(note.reload.note).to eq(updated_body)
+ expect(note.confidential).to be_truthy
+ end
end
context 'when there are ActiveRecord validation errors' do
@@ -60,12 +87,14 @@ RSpec.describe 'Updating a Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(note.reload.note).to eq(original_body)
+ expect(note.confidential).to be_falsey
end
- it 'returns the Note with its original body' do
+ it 'returns the original Note' do
post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['note']['body']).to eq(original_body)
+ expect(mutation_response['note']['confidential']).to be_falsey
end
end
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index e2474e1bcce..56a5f4907c1 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe 'Creating a Snippet' do
visibility_level: visibility_level,
project_path: project_path,
title: title,
- files: actions
+ blob_actions: actions
}
end
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 3b2f9dc0f19..3f39c0ab851 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe 'Updating a Snippet' do
title: updated_title
}
end
+
let(:mutation) do
graphql_mutation(:update_snippet, mutation_vars)
end
@@ -157,7 +158,7 @@ RSpec.describe 'Updating a Snippet' do
let(:mutation_vars) do
{
id: snippet_gid,
- files: [
+ blob_actions: [
{ action: :update, filePath: updated_file, content: updated_content },
{ action: :delete, filePath: deleted_file }
]
diff --git a/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
new file mode 100644
index 00000000000..3a9077061ad
--- /dev/null
+++ b/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting Alert Management Alert Assignees' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:alert) { create(:alert_management_alert, project: project) }
+ let_it_be(:other_alert) { create(:alert_management_alert, project: project) }
+ let_it_be(:todo) { create(:todo, :pending, target: alert, user: current_user, project: project) }
+ let_it_be(:other_todo) { create(:todo, :pending, target: other_alert, user: current_user, project: project) }
+
+ let(:fields) do
+ <<~QUERY
+ nodes {
+ iid
+ todos {
+ nodes {
+ id
+ }
+ }
+ }
+ QUERY
+ end
+
+ let(:graphql_query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('alertManagementAlerts', {}, fields)
+ )
+ end
+
+ let(:gql_alerts) { graphql_data.dig('project', 'alertManagementAlerts', 'nodes') }
+ let(:gql_todos) { gql_alerts.map { |gql_alert| [gql_alert['iid'], gql_alert['todos']['nodes']] }.to_h }
+ let(:gql_alert_todo) { gql_todos[alert.iid.to_s].first }
+ let(:gql_other_alert_todo) { gql_todos[other_alert.iid.to_s].first }
+
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'includes the correct metrics dashboard url' do
+ post_graphql(graphql_query, current_user: current_user)
+
+ expect(gql_alert_todo['id']).to eq(todo.to_global_id.to_s)
+ expect(gql_other_alert_todo['id']).to eq(other_todo.to_global_id.to_s)
+ end
+end
diff --git a/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb b/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
index b62215f43fb..9fbf5aaa41f 100644
--- a/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert_status_counts_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe 'getting Alert Management Alert counts by status' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:alert_1) { create(:alert_management_alert, :resolved, project: project) }
- let_it_be(:alert_2) { create(:alert_management_alert, project: project) }
+ let_it_be(:alert_resolved) { create(:alert_management_alert, :resolved, project: project) }
+ let_it_be(:alert_triggered) { create(:alert_management_alert, project: project) }
let_it_be(:other_project_alert) { create(:alert_management_alert) }
let(:params) { {} }
@@ -58,7 +58,7 @@ RSpec.describe 'getting Alert Management Alert counts by status' do
end
context 'with search criteria' do
- let(:params) { { search: alert_1.title } }
+ let(:params) { { search: alert_resolved.title } }
it_behaves_like 'a working graphql query'
it 'returns the correct counts for each status' do
diff --git a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
index f050c6873f3..d3a2e6a1deb 100644
--- a/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alerts_spec.rb
@@ -4,12 +4,12 @@ require 'spec_helper'
RSpec.describe 'getting Alert Management Alerts' do
include GraphqlHelpers
- let_it_be(:payload) { { 'custom' => { 'alert' => 'payload' } } }
+ let_it_be(:payload) { { 'custom' => { 'alert' => 'payload' }, 'runbook' => 'runbook' } }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:resolved_alert) { create(:alert_management_alert, :all_fields, :resolved, project: project, issue: nil, severity: :low) }
- let_it_be(:triggered_alert) { create(:alert_management_alert, :all_fields, project: project, severity: :critical, payload: payload) }
- let_it_be(:other_project_alert) { create(:alert_management_alert, :all_fields) }
+ let_it_be(:resolved_alert) { create(:alert_management_alert, :all_fields, :resolved, project: project, issue: nil, severity: :low).present }
+ let_it_be(:triggered_alert) { create(:alert_management_alert, :all_fields, project: project, severity: :critical, payload: payload).present }
+ let_it_be(:other_project_alert) { create(:alert_management_alert, :all_fields).present }
let(:params) { {} }
@@ -71,10 +71,13 @@ RSpec.describe 'getting Alert Management Alerts' do
'eventCount' => triggered_alert.events,
'startedAt' => triggered_alert.started_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
'endedAt' => nil,
- 'details' => { 'custom.alert' => 'payload' },
+ 'details' => { 'custom.alert' => 'payload', 'runbook' => 'runbook' },
'createdAt' => triggered_alert.created_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
'updatedAt' => triggered_alert.updated_at.strftime('%Y-%m-%dT%H:%M:%SZ'),
- 'metricsDashboardUrl' => nil
+ 'metricsDashboardUrl' => nil,
+ 'detailsUrl' => triggered_alert.details_url,
+ 'prometheusAlert' => nil,
+ 'runbook' => 'runbook'
)
expect(second_alert).to include(
diff --git a/spec/requests/api/graphql/project/container_expiration_policy_spec.rb b/spec/requests/api/graphql/project/container_expiration_policy_spec.rb
index b064e4d43e9..dc16847a669 100644
--- a/spec/requests/api/graphql/project/container_expiration_policy_spec.rb
+++ b/spec/requests/api/graphql/project/container_expiration_policy_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'getting a repository in a project' do
#{all_graphql_fields_for('container_expiration_policy'.classify)}
QUERY
end
+
let(:query) do
graphql_query_for(
'project',
diff --git a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
index e47c025f8b2..decab900a43 100644
--- a/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
+++ b/spec/requests/api/graphql/project/issue/designs/designs_spec.rb
@@ -24,12 +24,14 @@ RSpec.describe 'Getting designs related to an issue' do
}
NODE
end
+
let(:issue) { design.issue }
let(:project) { issue.project }
let(:query) { make_query }
let(:design_collection) do
graphql_data_at(:project, :issue, :design_collection)
end
+
let(:design_response) do
design_collection.dig('designs', 'edges').first['node']
end
@@ -185,6 +187,7 @@ RSpec.describe 'Getting designs related to an issue' do
}
NODE
end
+
let(:design_response) do
design_collection['designs']['edges']
end
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index cdfff2f50d4..06e613a09bc 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'getting an issue list for a project' do
[create(:issue, project: project, discussion_locked: true),
create(:issue, project: project)]
end
+
let(:fields) do
<<~QUERY
edges {
diff --git a/spec/requests/api/graphql/project/jira_import_spec.rb b/spec/requests/api/graphql/project/jira_import_spec.rb
index 814965262b6..1cc30b95162 100644
--- a/spec/requests/api/graphql/project/jira_import_spec.rb
+++ b/spec/requests/api/graphql/project/jira_import_spec.rb
@@ -53,6 +53,7 @@ RSpec.describe 'query Jira import data' do
}
)
end
+
let(:jira_imports) { graphql_data.dig('project', 'jiraImports', 'nodes')}
let(:jira_import_status) { graphql_data.dig('project', 'jiraImportStatus')}
@@ -106,7 +107,7 @@ RSpec.describe 'query Jira import data' do
let(:query) do
%(
query {
- project(fullPath:"#{project.full_path}") {
+ project(fullPath: "#{project.full_path}") {
jiraImports(first: 1) {
nodes {
jiraProjectKey
@@ -132,7 +133,7 @@ RSpec.describe 'query Jira import data' do
let(:query) do
%(
query {
- project(fullPath:"#{project.full_path}") {
+ project(fullPath: "#{project.full_path}") {
jiraImports(last: 1) {
nodes {
jiraProjectKey
diff --git a/spec/requests/api/graphql/project/jira_projects_spec.rb b/spec/requests/api/graphql/project/jira_projects_spec.rb
index d5f59711ab1..410d5b21505 100644
--- a/spec/requests/api/graphql/project/jira_projects_spec.rb
+++ b/spec/requests/api/graphql/project/jira_projects_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'query Jira projects' do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
- include_context 'jira projects request context'
+ include_context 'Jira projects request context'
let(:services) { graphql_data_at(:project, :services, :edges) }
let(:jira_projects) { services.first.dig('node', 'projects', 'nodes') }
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index e2255fdb048..bb63a5994b0 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -171,4 +171,43 @@ RSpec.describe 'getting merge request listings nested in a project' do
it_behaves_like 'searching with parameters'
end
+
+ describe 'fields' do
+ let(:requested_fields) { nil }
+ let(:extra_iid_for_second_query) { merge_request_c.iid.to_s }
+ let(:search_params) { { iids: [merge_request_a.iid.to_s, merge_request_b.iid.to_s] } }
+
+ def execute_query
+ query = query_merge_requests(requested_fields)
+ post_graphql(query, current_user: current_user)
+ end
+
+ context 'when requesting `commit_count`' do
+ let(:requested_fields) { [:commit_count] }
+
+ it 'exposes `commit_count`' do
+ merge_request_a.metrics.update!(commits_count: 5)
+
+ execute_query
+
+ expect(results).to include(a_hash_including('commitCount' => 5))
+ end
+
+ include_examples 'N+1 query check'
+ end
+
+ context 'when requesting `merged_at`' do
+ let(:requested_fields) { [:merged_at] }
+
+ before do
+ # make the MRs "merged"
+ [merge_request_a, merge_request_b, merge_request_c].each do |mr|
+ mr.update_column(:state_id, MergeRequest.available_states[:merged])
+ mr.metrics.update_column(:merged_at, Time.now)
+ end
+ end
+
+ include_examples 'N+1 query check'
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/packages_spec.rb b/spec/requests/api/graphql/project/packages_spec.rb
index 88f97f9256b..5df98ed1e6b 100644
--- a/spec/requests/api/graphql/project/packages_spec.rb
+++ b/spec/requests/api/graphql/project/packages_spec.rb
@@ -28,42 +28,40 @@ RSpec.describe 'getting a package list for a project' do
)
end
- context 'without the need for a license' do
- context 'when user has access to the project' do
- before do
- project.add_reporter(current_user)
- post_graphql(query, current_user: current_user)
- end
+ context 'when user has access to the project' do
+ before do
+ project.add_reporter(current_user)
+ post_graphql(query, current_user: current_user)
+ end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
- it 'returns packages successfully' do
- expect(packages_data[0]['node']['name']).to eq package.name
- end
+ it 'returns packages successfully' do
+ expect(packages_data[0]['node']['name']).to eq package.name
end
+ end
- context 'when the user does not have access to the project/packages' do
- before do
- post_graphql(query, current_user: current_user)
- end
+ context 'when the user does not have access to the project/packages' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
- it 'returns nil' do
- expect(graphql_data['project']).to be_nil
- end
+ it 'returns nil' do
+ expect(graphql_data['project']).to be_nil
end
+ end
- context 'when the user is not autenthicated' do
- before do
- post_graphql(query)
- end
+ context 'when the user is not authenticated' do
+ before do
+ post_graphql(query)
+ end
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
- it 'returns nil' do
- expect(graphql_data['project']).to be_nil
- end
+ it 'returns nil' do
+ expect(graphql_data['project']).to be_nil
end
end
end
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index 57b9de25c3d..fef0e7e160c 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -29,4 +29,10 @@ RSpec.describe 'getting pipeline information nested in a project' do
expect(pipeline_graphql_data).not_to be_nil
end
+
+ it 'contains configSource' do
+ post_graphql(query, current_user: current_user)
+
+ expect(pipeline_graphql_data.dig('configSource')).to eq('UNKNOWN_SOURCE')
+ end
end
diff --git a/spec/requests/api/graphql/project/repository_spec.rb b/spec/requests/api/graphql/project/repository_spec.rb
index bd719a69647..a4984688557 100644
--- a/spec/requests/api/graphql/project/repository_spec.rb
+++ b/spec/requests/api/graphql/project/repository_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe 'getting a repository in a project' do
#{all_graphql_fields_for('repository'.classify)}
QUERY
end
+
let(:query) do
graphql_query_for(
'project',
diff --git a/spec/requests/api/graphql/project/tree/tree_spec.rb b/spec/requests/api/graphql/project/tree/tree_spec.rb
index bce63d57c38..f4cd316da96 100644
--- a/spec/requests/api/graphql/project/tree/tree_spec.rb
+++ b/spec/requests/api/graphql/project/tree/tree_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe 'getting a tree in a project' do
}
QUERY
end
+
let(:query) do
graphql_query_for(
'project',
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index b115030afbc..c6049e098be 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -76,16 +76,16 @@ RSpec.describe 'getting project information' do
def run_query(number)
q = <<~GQL
- query {
- project(fullPath: "#{project.full_path}") {
- mergeRequests(first: #{number}) {
- nodes {
- assignees { nodes { username } }
- headPipeline { status }
+ query {
+ project(fullPath: "#{project.full_path}") {
+ mergeRequests(first: #{number}) {
+ nodes {
+ assignees { nodes { username } }
+ headPipeline { status }
+ }
}
}
}
- }
GQL
post_graphql(q, current_user: current_user)
diff --git a/spec/requests/api/graphql/user_query_spec.rb b/spec/requests/api/graphql/user_query_spec.rb
index 7ba1788a9ef..2f4dc0a9160 100644
--- a/spec/requests/api/graphql/user_query_spec.rb
+++ b/spec/requests/api/graphql/user_query_spec.rb
@@ -75,7 +75,9 @@ RSpec.describe 'getting user information' do
'name' => presenter.name,
'username' => presenter.username,
'webUrl' => presenter.web_url,
- 'avatarUrl' => presenter.avatar_url
+ 'avatarUrl' => presenter.avatar_url,
+ 'status' => presenter.status,
+ 'email' => presenter.email
))
end
@@ -83,6 +85,7 @@ RSpec.describe 'getting user information' do
let(:user_fields) do
query_graphql_field(:assigned_merge_requests, mr_args, 'nodes { id }')
end
+
let(:mr_args) { nil }
it_behaves_like 'a working graphql query'
@@ -145,6 +148,7 @@ RSpec.describe 'getting user information' do
let(:user_fields) do
query_graphql_field(:authored_merge_requests, mr_args, 'nodes { id }')
end
+
let(:mr_args) { nil }
it_behaves_like 'a working graphql query'
diff --git a/spec/requests/api/group_import_spec.rb b/spec/requests/api/group_import_spec.rb
index ad67f737725..cb63206fcb8 100644
--- a/spec/requests/api/group_import_spec.rb
+++ b/spec/requests/api/group_import_spec.rb
@@ -217,12 +217,14 @@ RSpec.describe API::GroupImport do
let!(:fog_connection) do
stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
end
+
let(:tmp_object) do
fog_connection.directories.new(key: 'uploads').files.create(
key: "tmp/uploads/#{file_name}",
body: file_upload
)
end
+
let(:fog_file) { fog_to_uploaded_file(tmp_object) }
let(:params) do
{
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index 2b361f2b503..7ed6e1a295f 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -3,15 +3,65 @@
require 'spec_helper'
RSpec.describe API::GroupMilestones do
- let(:user) { create(:user) }
- let(:group) { create(:group, :private) }
- let(:project) { create(:project, namespace: group) }
- let!(:group_member) { create(:group_member, group: group, user: user) }
- let!(:closed_milestone) { create(:closed_milestone, group: group, title: 'version1', description: 'closed milestone') }
- let!(:milestone) { create(:milestone, group: group, title: 'version2', description: 'open milestone') }
-
- it_behaves_like 'group and project milestones', "/groups/:id/milestones" do
- let(:route) { "/groups/#{group.id}/milestones" }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:project) { create(:project, namespace: group) }
+ let_it_be(:group_member) { create(:group_member, group: group, user: user) }
+ let_it_be(:closed_milestone) { create(:closed_milestone, group: group, title: 'version1', description: 'closed milestone') }
+ let_it_be(:milestone) { create(:milestone, group: group, title: 'version2', description: 'open milestone') }
+ let(:route) { "/groups/#{group.id}/milestones" }
+
+ it_behaves_like 'group and project milestones', "/groups/:id/milestones"
+
+ describe 'GET /groups/:id/milestones' do
+ context 'when include_parent_milestones is true' do
+ let_it_be(:ancestor_group) { create(:group, :private) }
+ let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group) }
+ let_it_be(:params) { { include_parent_milestones: true } }
+
+ before_all do
+ group.update(parent: ancestor_group)
+ end
+
+ shared_examples 'listing all milestones' do
+ it 'returns correct list of milestones' do
+ get api(route, user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(milestones.size)
+ expect(json_response.map { |entry| entry["id"] }).to eq(milestones.map(&:id))
+ end
+ end
+
+ context 'when user has access to ancestor groups' do
+ let(:milestones) { [ancestor_group_milestone, milestone, closed_milestone] }
+
+ before do
+ ancestor_group.add_guest(user)
+ group.add_guest(user)
+ end
+
+ it_behaves_like 'listing all milestones'
+
+ context 'when iids param is present' do
+ let_it_be(:params) { { include_parent_milestones: true, iids: [milestone.iid] } }
+
+ it_behaves_like 'listing all milestones'
+ end
+ end
+
+ context 'when user has no access to ancestor groups' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_guest(user)
+ end
+
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [milestone, closed_milestone] }
+ end
+ end
+ end
end
def setup_for_group
diff --git a/spec/requests/api/group_packages_spec.rb b/spec/requests/api/group_packages_spec.rb
index 7c7e8da3fb1..e02f6099637 100644
--- a/spec/requests/api/group_packages_spec.rb
+++ b/spec/requests/api/group_packages_spec.rb
@@ -13,135 +13,133 @@ RSpec.describe API::GroupPackages do
let(:url) { "/groups/#{group.id}/packages" }
let(:package_schema) { 'public_api/v4/packages/group_packages' }
- context 'without the need for a license' do
- context 'with sorting' do
- let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
- let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
- let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
-
- before do
- travel_to(1.day.ago) do
- package3
- end
+ context 'with sorting' do
+ let_it_be(:package1) { create(:npm_package, project: project, version: '3.1.0', name: "@#{project.root_namespace.path}/foo1") }
+ let_it_be(:package2) { create(:nuget_package, project: project, version: '2.0.4') }
+ let(:package3) { create(:maven_package, project: project, version: '1.1.1', name: 'zzz') }
+
+ before do
+ travel_to(1.day.ago) do
+ package3
end
+ end
- context 'without sorting params' do
- let(:packages) { [package3, package1, package2] }
+ context 'without sorting params' do
+ let(:packages) { [package3, package1, package2] }
- it 'sorts by created_at asc' do
- subject
+ it 'sorts by created_at asc' do
+ subject
- expect(json_response.map { |package| package['id'] }).to eq(packages.map(&:id))
- end
+ expect(json_response.map { |package| package['id'] }).to eq(packages.map(&:id))
end
+ end
- it_behaves_like 'package sorting', 'name' do
- let(:packages) { [package1, package2, package3] }
- end
+ it_behaves_like 'package sorting', 'name' do
+ let(:packages) { [package1, package2, package3] }
+ end
- it_behaves_like 'package sorting', 'created_at' do
- let(:packages) { [package3, package1, package2] }
- end
+ it_behaves_like 'package sorting', 'created_at' do
+ let(:packages) { [package3, package1, package2] }
+ end
- it_behaves_like 'package sorting', 'version' do
- let(:packages) { [package3, package2, package1] }
- end
+ it_behaves_like 'package sorting', 'version' do
+ let(:packages) { [package3, package2, package1] }
+ end
- it_behaves_like 'package sorting', 'type' do
- let(:packages) { [package3, package1, package2] }
- end
+ it_behaves_like 'package sorting', 'type' do
+ let(:packages) { [package3, package1, package2] }
+ end
- it_behaves_like 'package sorting', 'project_path' do
- let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
- let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
+ it_behaves_like 'package sorting', 'project_path' do
+ let(:another_project) { create(:project, :public, namespace: group, name: 'project B') }
+ let!(:package4) { create(:npm_package, project: another_project, version: '3.1.0', name: "@#{project.root_namespace.path}/bar") }
- let(:packages) { [package1, package2, package3, package4] }
- end
+ let(:packages) { [package1, package2, package3, package4] }
end
+ end
- context 'with private group' do
- let!(:package1) { create(:package, project: project) }
- let!(:package2) { create(:package, project: project) }
+ context 'with private group' do
+ let!(:package1) { create(:package, project: project) }
+ let!(:package2) { create(:package, project: project) }
- let(:group) { create(:group, :private) }
- let(:subgroup) { create(:group, :private, parent: group) }
- let(:project) { create(:project, :private, namespace: group) }
- let(:subproject) { create(:project, :private, namespace: subgroup) }
+ let(:group) { create(:group, :private) }
+ let(:subgroup) { create(:group, :private, parent: group) }
+ let(:project) { create(:project, :private, namespace: group) }
+ let(:subproject) { create(:project, :private, namespace: subgroup) }
- context 'with unauthenticated user' do
- it_behaves_like 'rejects packages access', :group, :no_type, :not_found
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects packages access', :group, :no_type, :not_found
+ end
+
+ context 'with authenticated user' do
+ subject { get api(url, user) }
+
+ it_behaves_like 'returns packages', :group, :owner
+ it_behaves_like 'returns packages', :group, :maintainer
+ it_behaves_like 'returns packages', :group, :developer
+ it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
+ it_behaves_like 'rejects packages access', :group, :guest, :forbidden
- context 'with authenticated user' do
- subject { get api(url, user) }
+ context 'with subgroup' do
+ let(:subgroup) { create(:group, :private, parent: group) }
+ let(:subproject) { create(:project, :private, namespace: subgroup) }
+ let!(:package3) { create(:npm_package, project: subproject) }
- it_behaves_like 'returns packages', :group, :owner
- it_behaves_like 'returns packages', :group, :maintainer
- it_behaves_like 'returns packages', :group, :developer
+ it_behaves_like 'returns packages with subgroups', :group, :owner
+ it_behaves_like 'returns packages with subgroups', :group, :maintainer
+ it_behaves_like 'returns packages with subgroups', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
- context 'with subgroup' do
- let(:subgroup) { create(:group, :private, parent: group) }
- let(:subproject) { create(:project, :private, namespace: subgroup) }
- let!(:package3) { create(:npm_package, project: subproject) }
+ context 'excluding subgroup' do
+ let(:url) { "/groups/#{group.id}/packages?exclude_subgroups=true" }
- it_behaves_like 'returns packages with subgroups', :group, :owner
- it_behaves_like 'returns packages with subgroups', :group, :maintainer
- it_behaves_like 'returns packages with subgroups', :group, :developer
+ it_behaves_like 'returns packages', :group, :owner
+ it_behaves_like 'returns packages', :group, :maintainer
+ it_behaves_like 'returns packages', :group, :developer
it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
it_behaves_like 'rejects packages access', :group, :guest, :forbidden
-
- context 'excluding subgroup' do
- let(:url) { "/groups/#{group.id}/packages?exclude_subgroups=true" }
-
- it_behaves_like 'returns packages', :group, :owner
- it_behaves_like 'returns packages', :group, :maintainer
- it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'rejects packages access', :group, :reporter, :forbidden
- it_behaves_like 'rejects packages access', :group, :guest, :forbidden
- end
end
end
end
+ end
- context 'with public group' do
- let_it_be(:package1) { create(:package, project: project) }
- let_it_be(:package2) { create(:package, project: project) }
+ context 'with public group' do
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project) }
- context 'with unauthenticated user' do
- it_behaves_like 'returns packages', :group, :no_type
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'returns packages', :group, :no_type
+ end
- context 'with authenticated user' do
- subject { get api(url, user) }
+ context 'with authenticated user' do
+ subject { get api(url, user) }
- it_behaves_like 'returns packages', :group, :owner
- it_behaves_like 'returns packages', :group, :maintainer
- it_behaves_like 'returns packages', :group, :developer
- it_behaves_like 'returns packages', :group, :reporter
- it_behaves_like 'returns packages', :group, :guest
- end
+ it_behaves_like 'returns packages', :group, :owner
+ it_behaves_like 'returns packages', :group, :maintainer
+ it_behaves_like 'returns packages', :group, :developer
+ it_behaves_like 'returns packages', :group, :reporter
+ it_behaves_like 'returns packages', :group, :guest
end
+ end
- context 'with pagination params' do
- let_it_be(:package1) { create(:package, project: project) }
- let_it_be(:package2) { create(:package, project: project) }
- let_it_be(:package3) { create(:npm_package, project: project) }
- let_it_be(:package4) { create(:npm_package, project: project) }
+ context 'with pagination params' do
+ let_it_be(:package1) { create(:package, project: project) }
+ let_it_be(:package2) { create(:package, project: project) }
+ let_it_be(:package3) { create(:npm_package, project: project) }
+ let_it_be(:package4) { create(:npm_package, project: project) }
- it_behaves_like 'returns paginated packages'
- end
+ it_behaves_like 'returns paginated packages'
+ end
- it_behaves_like 'filters on each package_type', is_project: false
+ it_behaves_like 'filters on each package_type', is_project: false
- context 'does not accept non supported package_type value' do
- include_context 'package filter context'
+ context 'does not accept non supported package_type value' do
+ include_context 'package filter context'
- let(:url) { group_filter_url(:type, 'foo') }
+ let(:url) { group_filter_url(:type, 'foo') }
- it_behaves_like 'returning response status', :bad_request
- end
+ it_behaves_like 'returning response status', :bad_request
end
end
end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index c6d6ae1615b..41b013f49ee 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -169,6 +169,14 @@ RSpec.describe API::GroupVariables do
expect(response).to have_gitlab_http_status(:not_found)
end
+
+ it 'responds with 400 if the update fails' do
+ put api("/groups/#{group.id}/variables/#{variable.key}", user), params: { value: 'shrt', masked: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(variable.reload.masked).to eq(false)
+ expect(json_response['message']).to eq('value' => ['is invalid'])
+ end
end
context 'authorized user with invalid permissions' do
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index fac9f4dfe00..da423e986c3 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -860,6 +860,66 @@ RSpec.describe API::Groups do
end
end
+ context 'with similarity ordering' do
+ let_it_be(:group_with_projects) { create(:group) }
+ let_it_be(:project_1) { create(:project, name: 'Project', path: 'project', group: group_with_projects) }
+ let_it_be(:project_2) { create(:project, name: 'Test Project', path: 'test-project', group: group_with_projects) }
+ let_it_be(:project_3) { create(:project, name: 'Test', path: 'test', group: group_with_projects) }
+
+ let(:params) { { order_by: 'similarity', search: 'test' } }
+
+ subject { get api("/groups/#{group_with_projects.id}/projects", user1), params: params }
+
+ before do
+ group_with_projects.add_owner(user1)
+ end
+
+ it 'returns items ordered by similarity' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(2)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to eq(['Test', 'Test Project'])
+ end
+
+ context 'when `search` parameter is not given' do
+ before do
+ params.delete(:search)
+ end
+
+ it 'returns items ordered by name' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(3)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to eq(['Project', 'Test', 'Test Project'])
+ end
+ end
+
+ context 'when `similarity_search` feature flag is off' do
+ before do
+ stub_feature_flags(similarity_search: false)
+ end
+
+ it 'returns items ordered by name' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response.length).to eq(2)
+
+ project_names = json_response.map { |proj| proj['name'] }
+ expect(project_names).to eq(['Test', 'Test Project'])
+ end
+ end
+ end
+
it "returns the group's projects with simple representation" do
get api("/groups/#{group1.id}/projects", user1), params: { simple: true }
@@ -1012,6 +1072,7 @@ RSpec.describe API::Groups do
let!(:project4) do
create(:project, namespace: group2, path: 'test_project', visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
+
let(:path) { "/groups/#{group1.id}/projects/shared" }
before do
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 12cd5ace84e..fefa7105327 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe API::Helpers do
'CONTENT_TYPE' => 'text/plain;charset=utf-8'
}
end
+
let(:header) { }
let(:request) { Grape::Request.new(env)}
let(:params) { request.params }
diff --git a/spec/requests/api/import_bitbucket_server_spec.rb b/spec/requests/api/import_bitbucket_server_spec.rb
index 5828dab3080..dac139064da 100644
--- a/spec/requests/api/import_bitbucket_server_spec.rb
+++ b/spec/requests/api/import_bitbucket_server_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe API::ImportBitbucketServer do
Grape::Endpoint.before_each nil
end
- it 'returns 400 response due to a blcoked URL' do
+ it 'returns 400 response due to a blocked URL' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, project.name, user.namespace, user, anything)
.and_return(double(execute: project))
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index f026314f7a8..bbfb17fe753 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe API::ImportGithub do
before do
Grape::Endpoint.before_each do |endpoint|
- allow(endpoint).to receive(:client).and_return(double('client', user: provider_user, repo: provider_repo).as_null_object)
+ allow(endpoint).to receive(:client).and_return(double('client', user: provider_user, repository: provider_repo).as_null_object)
end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 7d219954e9d..873189af397 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -120,6 +120,138 @@ RSpec.describe API::Internal::Base do
end
end
+ describe 'POST /internal/personal_access_token' do
+ it 'returns an error message when the key does not exist' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: non_existing_record_id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find the given key')
+ end
+
+ it 'returns an error message when the key is a deploy key' do
+ deploy_key = create(:deploy_key)
+
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: deploy_key.id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Deploy keys cannot be used to create personal access tokens')
+ end
+
+ it 'returns an error message when the user does not exist' do
+ key_without_user = create(:key, user: nil)
+
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key_without_user.id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq('Could not find a user for the given key')
+ expect(json_response['token']).to be_nil
+ end
+
+ it 'returns an error message when given a non-existent user' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ user_id: 0
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("Could not find the given user")
+ end
+
+ it 'returns an error message when no name parameter is received' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("No token name specified")
+ end
+
+ it 'returns an error message when no scopes parameter is received' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken'
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("No token scopes specified")
+ end
+
+ it 'returns an error message when expires_at contains an invalid date' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: ['api'],
+ expires_at: 'invalid-date'
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to eq("Invalid token expiry date: 'invalid-date'")
+ end
+
+ it 'returns an error message when it receives an invalid scope' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api badscope read_repository)
+ }
+
+ expect(json_response['success']).to be_falsey
+ expect(json_response['message']).to match(/\AInvalid scope: 'badscope'. Valid scopes are: /)
+ end
+
+ it 'returns a token without expiry when the expires_at parameter is missing' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api read_repository)
+ }
+
+ expect(json_response['success']).to be_truthy
+ expect(json_response['token']).to match(/\A\S{20}\z/)
+ expect(json_response['scopes']).to match_array(%w(read_api read_repository))
+ expect(json_response['expires_at']).to be_nil
+ end
+
+ it 'returns a token with expiry when it receives a valid expires_at parameter' do
+ post api('/internal/personal_access_token'),
+ params: {
+ secret_token: secret_token,
+ key_id: key.id,
+ name: 'newtoken',
+ scopes: %w(read_api read_repository),
+ expires_at: '9001-11-17'
+ }
+
+ expect(json_response['success']).to be_truthy
+ expect(json_response['token']).to match(/\A\S{20}\z/)
+ expect(json_response['scopes']).to match_array(%w(read_api read_repository))
+ expect(json_response['expires_at']).to eq('9001-11-17')
+ end
+ end
+
describe "POST /internal/lfs_authenticate" do
before do
project.add_developer(user)
@@ -321,6 +453,8 @@ RSpec.describe API::Internal::Base do
expect(json_response["status"]).to be_truthy
expect(json_response["gl_project_path"]).to eq(project.wiki.full_path)
expect(json_response["gl_repository"]).to eq("wiki-#{project.id}")
+ expect(json_response["gl_key_type"]).to eq("key")
+ expect(json_response["gl_key_id"]).to eq(key.id)
expect(user.reload.last_activity_on).to be_nil
end
@@ -444,6 +578,8 @@ RSpec.describe API::Internal::Base do
expect(json_response["status"]).to be_truthy
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gl_project_path"]).to eq(project.full_path)
+ expect(json_response["gl_key_type"]).to eq("key")
+ expect(json_response["gl_key_id"]).to eq(key.id)
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
@@ -547,6 +683,7 @@ RSpec.describe API::Internal::Base do
}
}
end
+
let(:console_messages) { ['informational message'] }
let(:custom_action_result) { Gitlab::GitAccessResult::CustomAction.new(payload, console_messages) }
@@ -706,6 +843,8 @@ RSpec.describe API::Internal::Base do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response["status"]).to be_truthy
expect(json_response["gitaly"]).not_to be_nil
+ expect(json_response["gl_key_type"]).to eq("deploy_key")
+ expect(json_response["gl_key_id"]).to eq(key.id)
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
new file mode 100644
index 00000000000..555ca441fe7
--- /dev/null
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::Kubernetes do
+ describe "GET /internal/kubernetes/agent_info" do
+ context 'kubernetes_agent_internal_api feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_internal_api: false)
+ end
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/agent_info')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ it 'returns 403 if Authorization header not sent' do
+ get api('/internal/kubernetes/agent_info')
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'an agent is found' do
+ let!(:agent_token) { create(:cluster_agent_token) }
+
+ let(:agent) { agent_token.agent }
+ let(:project) { agent.project }
+
+ it 'returns expected data', :aggregate_failures do
+ get api('/internal/kubernetes/agent_info'), headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ expect(json_response).to match(
+ a_hash_including(
+ 'project_id' => project.id,
+ 'agent_id' => agent.id,
+ 'agent_name' => agent.name,
+ 'gitaly_info' => a_hash_including(
+ 'address' => match(/\.socket$/),
+ 'token' => 'secret',
+ 'features' => {}
+ ),
+ 'gitaly_repository' => a_hash_including(
+ 'storage_name' => project.repository_storage,
+ 'relative_path' => project.disk_path + '.git',
+ 'gl_repository' => "project-#{project.id}",
+ 'gl_project_path' => project.full_path
+ )
+ )
+ )
+ end
+ end
+
+ context 'no such agent exists' do
+ it 'returns 403' do
+ get api('/internal/kubernetes/agent_info'), headers: { 'Authorization' => 'Bearer ABCD' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
+ describe 'GET /internal/kubernetes/project_info' do
+ context 'kubernetes_agent_internal_api feature flag disabled' do
+ before do
+ stub_feature_flags(kubernetes_agent_internal_api: false)
+ end
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ it 'returns 403 if Authorization header not sent' do
+ get api('/internal/kubernetes/project_info')
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'no such agent exists' do
+ it 'returns 403' do
+ get api('/internal/kubernetes/project_info'), headers: { 'Authorization' => 'Bearer ABCD' }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'an agent is found' do
+ let!(:agent_token) { create(:cluster_agent_token) }
+
+ let(:agent) { agent_token.agent }
+
+ context 'project is public' do
+ let(:project) { create(:project, :public) }
+
+ it 'returns expected data', :aggregate_failures do
+ get api('/internal/kubernetes/project_info'), params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ expect(json_response).to match(
+ a_hash_including(
+ 'project_id' => project.id,
+ 'gitaly_info' => a_hash_including(
+ 'address' => match(/\.socket$/),
+ 'token' => 'secret',
+ 'features' => {}
+ ),
+ 'gitaly_repository' => a_hash_including(
+ 'storage_name' => project.repository_storage,
+ 'relative_path' => project.disk_path + '.git',
+ 'gl_repository' => "project-#{project.id}",
+ 'gl_project_path' => project.full_path
+ )
+ )
+ )
+ end
+ end
+
+ context 'project is private' do
+ let(:project) { create(:project, :private) }
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info'), params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'project is internal' do
+ let(:project) { create(:project, :internal) }
+
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info'), params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'project does not exist' do
+ it 'returns 404' do
+ get api('/internal/kubernetes/project_info'), params: { id: 0 }, headers: { 'Authorization' => "Bearer #{agent_token.token}" }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/internal/pages_spec.rb b/spec/requests/api/internal/pages_spec.rb
index 48fc95b6574..e58eba02132 100644
--- a/spec/requests/api/internal/pages_spec.rb
+++ b/spec/requests/api/internal/pages_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe API::Internal::Pages do
jwt_token = JWT.encode({ 'iss' => 'gitlab-pages' }, Gitlab::Pages.secret, 'HS256')
{ Gitlab::Pages::INTERNAL_API_REQUEST_HEADER => jwt_token }
end
+
let(:pages_secret) { SecureRandom.random_bytes(Gitlab::Pages::SECRET_LENGTH) }
before do
diff --git a/spec/requests/api/issues/get_group_issues_spec.rb b/spec/requests/api/issues/get_group_issues_spec.rb
index b53fac3679d..b0fbf3bf66d 100644
--- a/spec/requests/api/issues/get_group_issues_spec.rb
+++ b/spec/requests/api/issues/get_group_issues_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
created_at: 1.day.ago
end
+
let!(:group_confidential_issue) do
create :issue,
:confidential,
@@ -45,6 +46,7 @@ RSpec.describe API::Issues do
updated_at: 2.hours.ago,
created_at: 2.days.ago
end
+
let!(:group_issue) do
create :issue,
author: user,
@@ -56,14 +58,17 @@ RSpec.describe API::Issues do
description: issue_description,
created_at: 5.days.ago
end
+
let!(:group_label) do
create(:label, title: 'group_lbl', color: '#FFAABB', project: group_project)
end
+
let!(:group_label_link) { create(:label_link, label: group_label, target: group_issue) }
let!(:group_milestone) { create(:milestone, title: '3.0.0', project: group_project) }
let!(:group_empty_milestone) do
create(:milestone, title: '4.0.0', project: group_project)
end
+
let!(:group_note) { create(:note_on_issue, author: user, project: group_project, noteable: group_issue) }
let(:base_url) { "/groups/#{group.id}/issues" }
@@ -246,6 +251,7 @@ RSpec.describe API::Issues do
target_project: private_mrs_project,
description: "closes #{group_issue.to_reference(private_mrs_project)}")
end
+
let!(:merge_request2) do
create(:merge_request,
:simple,
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index 7ff07bf580d..4228ca2d5fd 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
closed_at: 1.hour.ago
end
+
let!(:confidential_issue) do
create :issue,
:confidential,
@@ -37,6 +38,7 @@ RSpec.describe API::Issues do
created_at: generate(:past_time),
updated_at: 2.hours.ago
end
+
let!(:issue) do
create :issue,
author: user,
@@ -48,6 +50,7 @@ RSpec.describe API::Issues do
title: issue_title,
description: issue_description
end
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
@@ -69,6 +72,7 @@ RSpec.describe API::Issues do
target_project: project,
description: "closes #{issue.to_reference}")
end
+
let!(:merge_request2) do
create(:merge_request,
:simple,
@@ -180,12 +184,15 @@ RSpec.describe API::Issues do
it 'avoids N+1 queries' do
get api("/projects/#{project.id}/issues", user)
- create_list(:issue, 3, project: project)
+ create_list(:issue, 3, project: project, closed_by: user)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/issues", user)
end.count
+ milestone = create(:milestone, project: project)
+ create(:issue, project: project, milestone: milestone, closed_by: create(:user))
+
expect do
get api("/projects/#{project.id}/issues", user)
end.not_to exceed_all_query_limit(control_count)
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 519bea22501..b638a65d65e 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
closed_at: 1.hour.ago
end
+
let!(:confidential_issue) do
create :issue,
:confidential,
@@ -37,6 +38,7 @@ RSpec.describe API::Issues do
created_at: generate(:past_time),
updated_at: 2.hours.ago
end
+
let!(:issue) do
create :issue,
author: user,
@@ -48,6 +50,7 @@ RSpec.describe API::Issues do
title: issue_title,
description: issue_description
end
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
@@ -384,6 +387,60 @@ RSpec.describe API::Issues do
end
end
+ context 'filtering by due date' do
+ # This date was chosen because it is the beginning of a week and near the beginning of a month
+ let_it_be(:frozen_time) { DateTime.parse('2020-08-03 12:00') }
+
+ let_it_be(:issue2) { create(:issue, project: project, author: user, due_date: frozen_time + 3.days) }
+ let_it_be(:issue3) { create(:issue, project: project, author: user, due_date: frozen_time + 10.days) }
+ let_it_be(:issue4) { create(:issue, project: project, author: user, due_date: frozen_time + 34.days) }
+ let_it_be(:issue5) { create(:issue, project: project, author: user, due_date: frozen_time - 8.days) }
+
+ before do
+ travel_to(frozen_time)
+ end
+
+ after do
+ travel_back
+ end
+
+ it 'returns them all when argument is empty' do
+ get api('/issues?due_date=', user)
+
+ expect_paginated_array_response(issue5.id, issue4.id, issue3.id, issue2.id, issue.id, closed_issue.id)
+ end
+
+ it 'returns issues without due date' do
+ get api('/issues?due_date=0', user)
+
+ expect_paginated_array_response(issue.id, closed_issue.id)
+ end
+
+ it 'returns issues due for this week' do
+ get api('/issues?due_date=week', user)
+
+ expect_paginated_array_response(issue2.id)
+ end
+
+ it 'returns issues due for this month' do
+ get api('/issues?due_date=month', user)
+
+ expect_paginated_array_response(issue3.id, issue2.id)
+ end
+
+ it 'returns issues that are due in the previous two weeks and next month' do
+ get api('/issues?due_date=next_month_and_previous_two_weeks', user)
+
+ expect_paginated_array_response(issue5.id, issue4.id, issue3.id, issue2.id)
+ end
+
+ it 'returns issues that are overdue' do
+ get api('/issues?due_date=overdue', user)
+
+ expect_paginated_array_response(issue5.id)
+ end
+ end
+
context 'filter by labels or label_name param' do
context 'N+1' do
let(:label_b) { create(:label, title: 'foo', project: project) }
@@ -807,6 +864,7 @@ RSpec.describe API::Issues do
target_project: private_mrs_project,
description: "closes #{issue.to_reference(private_mrs_project)}")
end
+
let!(:merge_request2) do
create(:merge_request,
:simple,
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index e2f1bb2cd1a..a7fe4d4509a 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe API::Issues do
updated_at: 3.hours.ago,
closed_at: 1.hour.ago
end
+
let!(:confidential_issue) do
create :issue,
:confidential,
@@ -36,6 +37,7 @@ RSpec.describe API::Issues do
created_at: generate(:past_time),
updated_at: 2.hours.ago
end
+
let!(:issue) do
create :issue,
author: user,
@@ -47,6 +49,7 @@ RSpec.describe API::Issues do
title: issue_title,
description: issue_description
end
+
let_it_be(:label) do
create(:label, title: 'label', color: '#FFAABB', project: project)
end
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index 53c57931d36..77d5a4f26a8 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -239,6 +239,18 @@ RSpec.describe API::Jobs do
end
end
+ context 'when config source is not ci' do
+ let(:non_ci_config_source) { ::Ci::PipelineEnums.non_ci_config_source_values.first }
+ let(:pipeline) do
+ create(:ci_pipeline, config_source: non_ci_config_source, project: project)
+ end
+
+ it 'returns the specified pipeline' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response[0]['pipeline']['sha']).to eq(pipeline.sha.to_s)
+ end
+ end
+
it 'avoids N+1 queries' do
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), params: query
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 68f1a0f1ba1..d4c05b4b198 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1551,25 +1551,33 @@ RSpec.describe API::MergeRequests do
it "returns 422 when source_branch equals target_branch" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", source_branch: "master", target_branch: "master", author: user }
+
expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq(["You can't use same project/branch for source and target"])
end
it "returns 400 when source_branch is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", target_branch: "master", author: user }
+
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('source_branch is missing')
end
it "returns 400 when target_branch is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { title: "Test merge_request", source_branch: "markdown", author: user }
+
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('target_branch is missing')
end
it "returns 400 when title is missing" do
post api("/projects/#{project.id}/merge_requests", user),
params: { target_branch: 'master', source_branch: 'markdown' }
+
expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('title is missing')
end
context 'with existing MR' do
@@ -1594,7 +1602,9 @@ RSpec.describe API::MergeRequests do
author: user
}
end.to change { MergeRequest.count }.by(0)
+
expect(response).to have_gitlab_http_status(:conflict)
+ expect(json_response['message']).to eq(["Another open merge request already exists for this source branch: !5"])
end
end
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index 1510d31a1a6..ca4ebd3689f 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -55,6 +55,7 @@ RSpec.describe API::Notes do
create(:project, namespace: private_user.namespace)
.tap { |p| p.add_maintainer(private_user) }
end
+
let(:private_issue) { create(:issue, project: private_project) }
let(:ext_proj) { create(:project, :public) }
diff --git a/spec/requests/api/notification_settings_spec.rb b/spec/requests/api/notification_settings_spec.rb
index 73cb4948524..7b4a58e63da 100644
--- a/spec/requests/api/notification_settings_spec.rb
+++ b/spec/requests/api/notification_settings_spec.rb
@@ -70,12 +70,13 @@ RSpec.describe API::NotificationSettings do
describe "PUT /projects/:id/notification_settings" do
it "updates project level notification settings for the current user" do
- put api("/projects/#{project.id}/notification_settings", user), params: { level: 'custom', new_note: true }
+ put api("/projects/#{project.id}/notification_settings", user), params: { level: 'custom', new_note: true, moved_project: true }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['level']).to eq(user.reload.notification_settings_for(project).level)
expect(json_response['events']['new_note']).to be_truthy
expect(json_response['events']['new_issue']).to be_falsey
+ expect(json_response['events']['moved_project']).to be_truthy
end
end
diff --git a/spec/requests/api/npm_packages_spec.rb b/spec/requests/api/npm_packages_spec.rb
index 98a1ca978a8..94647123df0 100644
--- a/spec/requests/api/npm_packages_spec.rb
+++ b/spec/requests/api/npm_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::NpmPackages do
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -407,39 +408,37 @@ RSpec.describe API::NpmPackages do
subject { get api(url) }
- context 'without the need for a license' do
- context 'with public project' do
- context 'with authenticated user' do
- subject { get api(url, personal_access_token: personal_access_token) }
+ context 'with public project' do
+ context 'with authenticated user' do
+ subject { get api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'returns package tags', :maintainer
- it_behaves_like 'returns package tags', :developer
- it_behaves_like 'returns package tags', :reporter
- it_behaves_like 'returns package tags', :guest
- end
+ it_behaves_like 'returns package tags', :maintainer
+ it_behaves_like 'returns package tags', :developer
+ it_behaves_like 'returns package tags', :reporter
+ it_behaves_like 'returns package tags', :guest
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'returns package tags', :no_type
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'returns package tags', :no_type
end
+ end
- context 'with private project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
+ context 'with private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- context 'with authenticated user' do
- subject { get api(url, personal_access_token: personal_access_token) }
+ context 'with authenticated user' do
+ subject { get api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'returns package tags', :maintainer
- it_behaves_like 'returns package tags', :developer
- it_behaves_like 'returns package tags', :reporter
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'returns package tags', :maintainer
+ it_behaves_like 'returns package tags', :developer
+ it_behaves_like 'returns package tags', :reporter
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :forbidden
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :forbidden
end
end
end
@@ -453,39 +452,37 @@ RSpec.describe API::NpmPackages do
subject { put api(url), env: { 'api.request.body': version } }
- context 'without the need for a license' do
- context 'with public project' do
- context 'with authenticated user' do
- subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
+ context 'with public project' do
+ context 'with authenticated user' do
+ subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
- it_behaves_like 'create package tag', :maintainer
- it_behaves_like 'create package tag', :developer
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'create package tag', :maintainer
+ it_behaves_like 'create package tag', :developer
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
+ end
- context 'with private project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
+ context 'with private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- context 'with authenticated user' do
- subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
+ context 'with authenticated user' do
+ subject { put api(url, personal_access_token: personal_access_token), env: { 'api.request.body': version } }
- it_behaves_like 'create package tag', :maintainer
- it_behaves_like 'create package tag', :developer
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'create package tag', :maintainer
+ it_behaves_like 'create package tag', :developer
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
end
end
@@ -499,39 +496,37 @@ RSpec.describe API::NpmPackages do
subject { delete api(url) }
- context 'without the need for a license' do
- context 'with public project' do
- context 'with authenticated user' do
- subject { delete api(url, personal_access_token: personal_access_token) }
+ context 'with public project' do
+ context 'with authenticated user' do
+ subject { delete api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'delete package tag', :maintainer
- it_behaves_like 'rejects package tags access', :developer, :forbidden
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'delete package tag', :maintainer
+ it_behaves_like 'rejects package tags access', :developer, :forbidden
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
+ end
- context 'with private project' do
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
- end
+ context 'with private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- context 'with authenticated user' do
- subject { delete api(url, personal_access_token: personal_access_token) }
+ context 'with authenticated user' do
+ subject { delete api(url, personal_access_token: personal_access_token) }
- it_behaves_like 'delete package tag', :maintainer
- it_behaves_like 'rejects package tags access', :developer, :forbidden
- it_behaves_like 'rejects package tags access', :reporter, :forbidden
- it_behaves_like 'rejects package tags access', :guest, :forbidden
- end
+ it_behaves_like 'delete package tag', :maintainer
+ it_behaves_like 'rejects package tags access', :developer, :forbidden
+ it_behaves_like 'rejects package tags access', :reporter, :forbidden
+ it_behaves_like 'rejects package tags access', :guest, :forbidden
+ end
- context 'with unauthenticated user' do
- it_behaves_like 'rejects package tags access', :no_type, :unauthorized
- end
+ context 'with unauthenticated user' do
+ it_behaves_like 'rejects package tags access', :no_type, :unauthorized
end
end
end
diff --git a/spec/requests/api/nuget_packages_spec.rb b/spec/requests/api/nuget_packages_spec.rb
index 43aa65d1f76..ab537a61058 100644
--- a/spec/requests/api/nuget_packages_spec.rb
+++ b/spec/requests/api/nuget_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::NugetPackages do
include WorkhorseHelpers
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
@@ -20,38 +21,76 @@ RSpec.describe API::NugetPackages do
context 'with valid project' do
using RSpec::Parameterized::TableSyntax
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | true | false | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | true | false | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
- 'PUBLIC' | :developer | false | false | 'process nuget service index request' | :success
- 'PUBLIC' | :guest | false | false | 'process nuget service index request' | :success
- 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
- 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
- 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
- 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
- end
+ context 'personal token' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | true | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | false | 'process nuget service index request' | :success
+ 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- subject { get api(url), headers: headers }
+ subject { get api(url), headers: headers }
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ context 'with job token' do
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | true | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :guest | false | true | 'process nuget service index request' | :success
+ 'PUBLIC' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
+ 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :guest | false | true | 'rejects nuget packages access' | :not_found
+ 'PRIVATE' | :developer | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'rejects nuget packages access' | :unauthorized
+ end
+
+ with_them do
+ let(:job) { user_token ? create(:ci_build, project: project, user: user) : double(token: 'wrong') }
+ let(:headers) { user_role == :anonymous ? {} : job_basic_auth_header(job) }
+
+ subject { get api(url), headers: headers }
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
+ end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ end
end
end
@@ -98,7 +137,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_header) }
before do
@@ -165,7 +204,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_header) }
before do
@@ -225,7 +264,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -286,7 +325,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -342,7 +381,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -397,7 +436,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
@@ -460,7 +499,7 @@ RSpec.describe API::NugetPackages do
with_them do
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
subject { get api(url), headers: headers }
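
The NuGet spec changes above swap the locally defined build_basic_auth_header for the basic_auth_header and job_basic_auth_header helpers pulled in through HttpBasicAuthHelpers, and add a CI job-token variant of the permission matrix. As a rough sketch only (the method bodies and the 'gitlab-ci-token' username are assumptions about that shared module, not a copy of it), helpers of this shape could look like:

require 'action_controller'

module HttpBasicAuthHelpers
  # Sketch: wrap Rails' basic-auth encoder to build the header hash that the
  # specs above pass as `headers:` to `get api(url)`.
  def basic_auth_header(username, password)
    {
      'HTTP_AUTHORIZATION' =>
        ActionController::HttpAuthentication::Basic.encode_credentials(username, password)
    }
  end

  # Sketch: job-token auth reuses basic auth with the CI job's token as the
  # password; the reserved username used here is an assumption.
  def job_basic_auth_header(job)
    basic_auth_header('gitlab-ci-token', job.token)
  end
end
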
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index b6838a39257..75183156c9d 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe API::PagesDomains do
build(:pages_domain, :without_key, :without_certificate, domain: 'www.other-domain.test', auto_ssl_enabled: true)
.slice(:domain, :auto_ssl_enabled)
end
+
let(:pages_domain_secure_params) { build(:pages_domain, domain: 'ssl.other-domain.test', project: project).slice(:domain, :certificate, :key) }
let(:pages_domain_secure_key_missmatch_params) {build(:pages_domain, :with_trusted_chain, project: project).slice(:domain, :certificate, :key) }
let(:pages_domain_secure_missing_chain_params) {build(:pages_domain, :with_missing_chain, project: project).slice(:certificate) }
diff --git a/spec/requests/api/performance_bar_spec.rb b/spec/requests/api/performance_bar_spec.rb
new file mode 100644
index 00000000000..a4dbb3d17b8
--- /dev/null
+++ b/spec/requests/api/performance_bar_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Performance Bar for API requests', :request_store, :clean_gitlab_redis_cache do
+ context 'with user that has access to the performance bar' do
+ let_it_be(:admin) { create(:admin) }
+
+ context 'when cookie is set to true' do
+ before do
+ cookies[:perf_bar_enabled] = 'true'
+ end
+
+ it 'stores performance data' do
+ get api("/users/#{admin.id}", admin)
+
+ expect(Peek.adapter.get(headers['X-Request-Id'])).not_to be_empty
+ end
+ end
+
+ context 'when cookie is missing' do
+ it 'does not store performance data' do
+ get api("/users/#{admin.id}", admin)
+
+ expect(Peek.adapter.get(headers['X-Request-Id'])).to be_nil
+ end
+ end
+ end
+
+ context 'with user that does not have access to the performance bar' do
+ let(:user) { create(:user) }
+
+ it 'does not store performance data' do
+ cookies[:perf_bar_enabled] = 'true'
+
+ get api("/users/#{user.id}", user)
+
+ expect(Peek.adapter.get(headers['X-Request-Id'])).to be_nil
+ end
+ end
+end
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index d7ba3b4e158..09d295afbea 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -338,6 +338,16 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
end
context 'with download strategy' do
+ before do
+ Grape::Endpoint.before_each do |endpoint|
+ allow(endpoint).to receive(:user_project).and_return(project)
+ end
+ end
+
+ after do
+ Grape::Endpoint.before_each nil
+ end
+
it 'starts' do
expect_any_instance_of(Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy).not_to receive(:send_file)
@@ -345,6 +355,12 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache do
expect(response).to have_gitlab_http_status(:accepted)
end
+
+ it 'removes previously exported archive file' do
+ expect(project).to receive(:remove_exports).once
+
+ post api(path, user)
+ end
end
end
diff --git a/spec/requests/api/project_hooks_spec.rb b/spec/requests/api/project_hooks_spec.rb
index 8ab90e26a51..3b2a7895630 100644
--- a/spec/requests/api/project_hooks_spec.rb
+++ b/spec/requests/api/project_hooks_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response.first['job_events']).to eq(true)
expect(json_response.first['pipeline_events']).to eq(true)
expect(json_response.first['wiki_page_events']).to eq(true)
+ expect(json_response.first['deployment_events']).to eq(true)
expect(json_response.first['enable_ssl_verification']).to eq(true)
expect(json_response.first['push_events_branch_filter']).to eq('master')
end
@@ -71,6 +72,7 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['job_events']).to eq(hook.job_events)
expect(json_response['pipeline_events']).to eq(hook.pipeline_events)
expect(json_response['wiki_page_events']).to eq(hook.wiki_page_events)
+ expect(json_response['deployment_events']).to eq(true)
expect(json_response['enable_ssl_verification']).to eq(hook.enable_ssl_verification)
end
@@ -92,8 +94,11 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
describe "POST /projects/:id/hooks" do
it "adds hook to project" do
expect do
- post api("/projects/#{project.id}/hooks", user),
- params: { url: "http://example.com", issues_events: true, confidential_issues_events: true, wiki_page_events: true, job_events: true, push_events_branch_filter: 'some-feature-branch' }
+ post(api("/projects/#{project.id}/hooks", user),
+ params: { url: "http://example.com", issues_events: true,
+ confidential_issues_events: true, wiki_page_events: true,
+ job_events: true, deployment_events: true,
+ push_events_branch_filter: 'some-feature-branch' })
end.to change {project.hooks.count}.by(1)
expect(response).to have_gitlab_http_status(:created)
@@ -108,6 +113,7 @@ RSpec.describe API::ProjectHooks, 'ProjectHooks' do
expect(json_response['job_events']).to eq(true)
expect(json_response['pipeline_events']).to eq(false)
expect(json_response['wiki_page_events']).to eq(true)
+ expect(json_response['deployment_events']).to eq(true)
expect(json_response['enable_ssl_verification']).to eq(true)
expect(json_response['push_events_branch_filter']).to eq('some-feature-branch')
expect(json_response).not_to include('token')
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index b238949ce47..d1e5df66b3f 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -3,17 +3,68 @@
require 'spec_helper'
RSpec.describe API::ProjectMilestones do
- let(:user) { create(:user) }
- let!(:project) { create(:project, namespace: user.namespace ) }
- let!(:closed_milestone) { create(:closed_milestone, project: project, title: 'version1', description: 'closed milestone') }
- let!(:milestone) { create(:milestone, project: project, title: 'version2', description: 'open milestone') }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace ) }
+ let_it_be(:closed_milestone) { create(:closed_milestone, project: project, title: 'version1', description: 'closed milestone') }
+ let_it_be(:milestone) { create(:milestone, project: project, title: 'version2', description: 'open milestone') }
+ let_it_be(:route) { "/projects/#{project.id}/milestones" }
before do
project.add_developer(user)
end
- it_behaves_like 'group and project milestones', "/projects/:id/milestones" do
- let(:route) { "/projects/#{project.id}/milestones" }
+ it_behaves_like 'group and project milestones', "/projects/:id/milestones"
+
+ describe 'GET /projects/:id/milestones' do
+ context 'when include_parent_milestones is true' do
+ let_it_be(:ancestor_group) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: ancestor_group) }
+ let_it_be(:ancestor_group_milestone) { create(:milestone, group: ancestor_group) }
+ let_it_be(:group_milestone) { create(:milestone, group: group) }
+ let(:params) { { include_parent_milestones: true } }
+
+ shared_examples 'listing all milestones' do
+ it 'returns correct list of milestones' do
+ get api(route, user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(milestones.size)
+ expect(json_response.map { |entry| entry["id"] }).to eq(milestones.map(&:id))
+ end
+ end
+
+ context 'when project parent is a namespace' do
+ it_behaves_like 'listing all milestones' do
+ let(:milestones) { [milestone, closed_milestone] }
+ end
+ end
+
+ context 'when project parent is a group' do
+ let(:milestones) { [group_milestone, ancestor_group_milestone, milestone, closed_milestone] }
+
+ before_all do
+ project.update(namespace: group)
+ end
+
+ it_behaves_like 'listing all milestones'
+
+ context 'when iids param is present' do
+ let(:params) { { include_parent_milestones: true, iids: [group_milestone.iid] } }
+
+ it_behaves_like 'listing all milestones'
+ end
+
+ context 'when user is not a member of the private project' do
+ let(:external_user) { create(:user) }
+
+ it 'returns a 404 error' do
+ get api(route, external_user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
describe 'DELETE /projects/:id/milestones/:milestone_id' do
@@ -45,10 +96,11 @@ RSpec.describe API::ProjectMilestones do
describe 'PUT /projects/:id/milestones/:milestone_id to test observer on close' do
it 'creates an activity event when a milestone is closed' do
- expect(Event).to receive(:create!)
+ path = "/projects/#{project.id}/milestones/#{milestone.id}"
- put api("/projects/#{project.id}/milestones/#{milestone.id}", user),
- params: { state_event: 'close' }
+ expect do
+ put api(path, user), params: { state_event: 'close' }
+ end.to change(Event, :count).by(1)
end
end
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index fbb0e3e109f..9b876edae24 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -123,16 +123,19 @@ RSpec.describe API::ProjectSnippets do
end
describe 'POST /projects/:project_id/snippets/' do
- let(:params) do
+ let(:base_params) do
{
title: 'Test Title',
- file_name: 'test.rb',
description: 'test description',
- content: 'puts "hello world"',
visibility: 'public'
}
end
+ let(:file_path) { 'file_1.rb' }
+ let(:file_content) { 'puts "hello world"' }
+ let(:params) { base_params.merge(file_params) }
+ let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
+
shared_examples 'project snippet repository actions' do
let(:snippet) { ProjectSnippet.find(json_response['id']) }
@@ -145,9 +148,9 @@ RSpec.describe API::ProjectSnippets do
it 'commits the files to the repository' do
subject
- blob = snippet.repository.blob_at('master', params[:file_name])
+ blob = snippet.repository.blob_at('master', file_path)
- expect(blob.data).to eq params[:content]
+ expect(blob.data).to eq file_content
end
end
@@ -184,63 +187,60 @@ RSpec.describe API::ProjectSnippets do
params['visibility'] = 'internal'
end
+ subject { post api("/projects/#{project.id}/snippets/", user), params: params }
+
it 'creates a new snippet' do
- post api("/projects/#{project.id}/snippets/", user), params: params
+ subject
expect(response).to have_gitlab_http_status(:created)
snippet = ProjectSnippet.find(json_response['id'])
- expect(snippet.content).to eq(params[:content])
+ expect(snippet.content).to eq(file_content)
expect(snippet.description).to eq(params[:description])
expect(snippet.title).to eq(params[:title])
- expect(snippet.file_name).to eq(params[:file_name])
+ expect(snippet.file_name).to eq(file_path)
expect(snippet.visibility_level).to eq(Snippet::INTERNAL)
end
- it_behaves_like 'project snippet repository actions' do
- subject { post api("/projects/#{project.id}/snippets/", user), params: params }
- end
+ it_behaves_like 'project snippet repository actions'
end
- it 'creates a new snippet' do
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ context 'with an admin' do
+ subject { post api("/projects/#{project.id}/snippets/", admin), params: params }
- expect(response).to have_gitlab_http_status(:created)
- snippet = ProjectSnippet.find(json_response['id'])
- expect(snippet.content).to eq(params[:content])
- expect(snippet.description).to eq(params[:description])
- expect(snippet.title).to eq(params[:title])
- expect(snippet.file_name).to eq(params[:file_name])
- expect(snippet.visibility_level).to eq(Snippet::PUBLIC)
- end
+ it 'creates a new snippet' do
+ subject
- it_behaves_like 'project snippet repository actions' do
- subject { post api("/projects/#{project.id}/snippets/", admin), params: params }
- end
+ expect(response).to have_gitlab_http_status(:created)
+ snippet = ProjectSnippet.find(json_response['id'])
+ expect(snippet.content).to eq(file_content)
+ expect(snippet.description).to eq(params[:description])
+ expect(snippet.title).to eq(params[:title])
+ expect(snippet.file_name).to eq(file_path)
+ expect(snippet.visibility_level).to eq(Snippet::PUBLIC)
+ end
- it 'returns 400 for missing parameters' do
- params.delete(:title)
+ it_behaves_like 'project snippet repository actions'
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ it 'returns 400 for missing parameters' do
+ params.delete(:title)
- expect(response).to have_gitlab_http_status(:bad_request)
- end
+ subject
- it 'returns 400 if content is blank' do
- params[:content] = ''
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ it_behaves_like 'snippet creation with files parameter'
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'content is empty'
- end
+ it_behaves_like 'snippet creation without files parameter'
- it 'returns 400 if title is blank' do
- params[:title] = ''
+ it 'returns 400 if title is blank' do
+ params[:title] = ''
- post api("/projects/#{project.id}/snippets/", admin), params: params
+ subject
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'title is empty'
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'title is empty'
+ end
end
context 'when save fails because the repository could not be created' do
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 59b2b09f0bf..d242d49fc1b 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe API::ProjectTemplates do
- let_it_be(:public_project) { create(:project, :public, path: 'path.with.dot') }
- let_it_be(:private_project) { create(:project, :private) }
+ let_it_be(:public_project) { create(:project, :public, :repository, create_templates: :merge_request, path: 'path.with.dot') }
+ let_it_be(:private_project) { create(:project, :private, :repository, create_templates: :issue) }
let_it_be(:developer) { create(:user) }
let(:url_encoded_path) { "#{public_project.namespace.path}%2F#{public_project.path}" }
@@ -62,6 +62,33 @@ RSpec.describe API::ProjectTemplates do
expect(json_response).to satisfy_one { |template| template['key'] == 'mit' }
end
+ it 'returns metrics_dashboard_ymls' do
+ get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response).to satisfy_one { |template| template['key'] == 'Default' }
+ end
+
+ it 'returns issue templates' do
+ get api("/projects/#{private_project.id}/templates/issues", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response.map {|t| t['key']}).to match_array(%w(bug feature_proposal template_test))
+ end
+
+ it 'returns merge request templates' do
+ get api("/projects/#{public_project.id}/templates/merge_requests")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/template_list')
+ expect(json_response.map {|t| t['key']}).to match_array(%w(bug feature_proposal template_test))
+ end
+
it 'returns 400 for an unknown template type' do
get api("/projects/#{public_project.id}/templates/unknown")
@@ -136,6 +163,14 @@ RSpec.describe API::ProjectTemplates do
expect(json_response['name']).to eq('Android')
end
+ it 'returns a specific metrics_dashboard_yml' do
+ get api("/projects/#{public_project.id}/templates/metrics_dashboard_ymls/Default")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('Default')
+ end
+
it 'returns a specific license' do
get api("/projects/#{public_project.id}/templates/licenses/mit")
@@ -143,12 +178,42 @@ RSpec.describe API::ProjectTemplates do
expect(response).to match_response_schema('public_api/v4/license')
end
+ it 'returns a specific issue template' do
+ get api("/projects/#{private_project.id}/templates/issues/bug", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('bug')
+ expect(json_response['content']).to eq('something valid')
+ end
+
+ it 'returns a specific merge request template' do
+ get api("/projects/#{public_project.id}/templates/merge_requests/feature_proposal")
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/template')
+ expect(json_response['name']).to eq('feature_proposal')
+ expect(json_response['content']).to eq('feature_proposal') # Content is identical to filename here
+ end
+
it 'returns 404 for an unknown specific template' do
get api("/projects/#{public_project.id}/templates/licenses/unknown")
expect(response).to have_gitlab_http_status(:not_found)
end
+ it 'returns 404 for an unknown issue template' do
+ get api("/projects/#{public_project.id}/templates/issues/unknown")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns 404 for an unknown merge request template' do
+ get api("/projects/#{public_project.id}/templates/merge_requests/unknown")
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
it 'denies access to an anonymous user on a private project' do
get api("/projects/#{private_project.id}/templates/licenses/mit")
@@ -166,6 +231,10 @@ RSpec.describe API::ProjectTemplates do
subject { get api("/projects/#{url_encoded_path}/templates/gitlab_ci_ymls/Android") }
end
+ it_behaves_like 'accepts project paths with dots' do
+ subject { get api("/projects/#{url_encoded_path}/templates/metrics_dashboard_ymls/Default") }
+ end
+
shared_examples 'path traversal attempt' do |template_type|
it 'rejects invalid filenames' do
get api("/projects/#{public_project.id}/templates/#{template_type}/%2e%2e%2fPython%2ea")
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 76b0c04e32d..46340f86f69 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -70,12 +70,14 @@ RSpec.describe API::Projects do
builds_enabled: false,
snippets_enabled: false)
end
+
let(:project_member2) do
create(:project_member,
user: user4,
project: project3,
access_level: ProjectMember::MAINTAINER)
end
+
let(:project4) do
create(:project,
name: 'third_project',
@@ -386,6 +388,14 @@ RSpec.describe API::Projects do
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3].select { |p| p.id > project2.id } }
end
+
+ context 'regression: empty string is ignored' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_after: '' } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3] }
+ end
+ end
end
context 'and using id_before' do
@@ -394,6 +404,14 @@ RSpec.describe API::Projects do
let(:current_user) { user }
let(:projects) { [public_project, project, project2, project3].select { |p| p.id < project2.id } }
end
+
+ context 'regression: empty string is ignored' do
+ it_behaves_like 'projects response' do
+ let(:filter) { { id_before: '' } }
+ let(:current_user) { user }
+ let(:projects) { [public_project, project, project2, project3] }
+ end
+ end
end
context 'and using both id_after and id_before' do
@@ -1586,6 +1604,7 @@ RSpec.describe API::Projects do
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
expect(json_response['readme_url']).to eq(project.readme_url)
+ expect(json_response).to have_key 'packages_enabled'
end
it 'returns a group link with expiration date' do
@@ -2339,6 +2358,20 @@ RSpec.describe API::Projects do
expect(project_member).to be_persisted
end
+ describe 'updating packages_enabled attribute' do
+ it 'is enabled by default' do
+ expect(project.packages_enabled).to be true
+ end
+
+ it 'disables project packages feature' do
+ put(api("/projects/#{project.id}", user), params: { packages_enabled: false })
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(project.reload.packages_enabled).to be false
+ expect(json_response['packages_enabled']).to eq(false)
+ end
+ end
+
it 'returns 400 when nothing sent' do
project_param = {}
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index b4e83c8caab..e2cfd87b507 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::PypiPackages do
include WorkhorseHelpers
include PackagesManagerApiSpecHelpers
+ include HttpBasicAuthHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :public) }
@@ -17,49 +18,47 @@ RSpec.describe API::PypiPackages do
subject { get api(url) }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | true | true | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | true | false | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | true | false | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | false | true | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | false | true | 'PyPi package versions' | :success
- 'PUBLIC' | :developer | false | false | 'PyPi package versions' | :success
- 'PUBLIC' | :guest | false | false | 'PyPi package versions' | :success
- 'PUBLIC' | :anonymous | false | true | 'PyPi package versions' | :success
- 'PRIVATE' | :developer | true | true | 'PyPi package versions' | :success
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | true | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :developer | true | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | true | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :developer | false | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | false | true | 'PyPi package versions' | :success
+ 'PUBLIC' | :developer | false | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :guest | false | false | 'PyPi package versions' | :success
+ 'PUBLIC' | :anonymous | false | true | 'PyPi package versions' | :success
+ 'PRIVATE' | :developer | true | true | 'PyPi package versions' | :success
+ 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ end
- subject { get api(url), headers: headers }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ subject { get api(url), headers: headers }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package GET requests'
+ it_behaves_like 'deploy token for package GET requests'
- it_behaves_like 'rejects PyPI access with unknown project id'
- end
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
describe 'POST /api/v4/projects/:id/packages/pypi/authorize' do
@@ -70,48 +69,46 @@ RSpec.describe API::PypiPackages do
subject { post api(url), headers: headers }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'process PyPi api request' | :success
- 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :success
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_header) }
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'process PyPi api request' | :success
+ 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :success
+ 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ end
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_header) }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package uploads'
+ it_behaves_like 'deploy token for package uploads'
- it_behaves_like 'rejects PyPI access with unknown project id'
- end
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
describe 'POST /api/v4/projects/:id/packages/pypi' do
@@ -135,61 +132,59 @@ RSpec.describe API::PypiPackages do
)
end
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package creation' | :created
- 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
- 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :created
- 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
- 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
- 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
- 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:user_headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
- let(:headers) { user_headers.merge(workhorse_header) }
-
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
-
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
- end
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'PyPi package creation' | :created
+ 'PUBLIC' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :developer | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :guest | false | true | 'process PyPi api request' | :forbidden
+ 'PUBLIC' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PUBLIC' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | true | true | 'process PyPi api request' | :created
+ 'PRIVATE' | :guest | true | true | 'process PyPi api request' | :forbidden
+ 'PRIVATE' | :developer | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | true | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :developer | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :guest | false | true | 'process PyPi api request' | :not_found
+ 'PRIVATE' | :developer | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :guest | false | false | 'process PyPi api request' | :unauthorized
+ 'PRIVATE' | :anonymous | false | true | 'process PyPi api request' | :unauthorized
end
- context 'with an invalid package' do
- let(:token) { personal_access_token.token }
- let(:user_headers) { build_basic_auth_header(user.username, token) }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:user_headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
let(:headers) { user_headers.merge(workhorse_header) }
before do
- params[:name] = '.$/@!^*'
- project.add_developer(user)
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
- it_behaves_like 'returning response status', :bad_request
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- it_behaves_like 'deploy token for package uploads'
+ context 'with an invalid package' do
+ let(:token) { personal_access_token.token }
+ let(:user_headers) { basic_auth_header(user.username, token) }
+ let(:headers) { user_headers.merge(workhorse_header) }
+
+ before do
+ params[:name] = '.$/@!^*'
+ project.add_developer(user)
+ end
- it_behaves_like 'rejects PyPI access with unknown project id'
+ it_behaves_like 'returning response status', :bad_request
end
+
+ it_behaves_like 'deploy token for package uploads'
+
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
describe 'GET /api/v4/projects/:id/packages/pypi/files/:sha256/*file_identifier' do
@@ -200,60 +195,58 @@ RSpec.describe API::PypiPackages do
subject { get api(url) }
- context 'without the need for a license' do
- context 'with valid project' do
- using RSpec::Parameterized::TableSyntax
-
- where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
- 'PUBLIC' | :developer | true | true | 'PyPi package download' | :success
- 'PUBLIC' | :guest | true | true | 'PyPi package download' | :success
- 'PUBLIC' | :developer | true | false | 'PyPi package download' | :success
- 'PUBLIC' | :guest | true | false | 'PyPi package download' | :success
- 'PUBLIC' | :developer | false | true | 'PyPi package download' | :success
- 'PUBLIC' | :guest | false | true | 'PyPi package download' | :success
- 'PUBLIC' | :developer | false | false | 'PyPi package download' | :success
- 'PUBLIC' | :guest | false | false | 'PyPi package download' | :success
- 'PUBLIC' | :anonymous | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | true | true | 'PyPi package download' | :success
- 'PRIVATE' | :guest | true | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | true | false | 'PyPi package download' | :success
- 'PRIVATE' | :guest | true | false | 'PyPi package download' | :success
- 'PRIVATE' | :developer | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :guest | false | true | 'PyPi package download' | :success
- 'PRIVATE' | :developer | false | false | 'PyPi package download' | :success
- 'PRIVATE' | :guest | false | false | 'PyPi package download' | :success
- 'PRIVATE' | :anonymous | false | true | 'PyPi package download' | :success
- end
-
- with_them do
- let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ context 'with valid project' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:project_visibility_level, :user_role, :member, :user_token, :shared_examples_name, :expected_status) do
+ 'PUBLIC' | :developer | true | true | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | true | true | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | true | false | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | true | false | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | false | true | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | false | true | 'PyPi package download' | :success
+ 'PUBLIC' | :developer | false | false | 'PyPi package download' | :success
+ 'PUBLIC' | :guest | false | false | 'PyPi package download' | :success
+ 'PUBLIC' | :anonymous | false | true | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | true | true | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | true | true | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | true | false | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | true | false | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | false | true | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | false | true | 'PyPi package download' | :success
+ 'PRIVATE' | :developer | false | false | 'PyPi package download' | :success
+ 'PRIVATE' | :guest | false | false | 'PyPi package download' | :success
+ 'PRIVATE' | :anonymous | false | true | 'PyPi package download' | :success
+ end
- subject { get api(url), headers: headers }
+ with_them do
+ let(:token) { user_token ? personal_access_token.token : 'wrong' }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
- end
+ subject { get api(url), headers: headers }
- it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project_visibility_level, false))
end
+
+ it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
+ end
- context 'with deploy token headers' do
- let(:headers) { build_basic_auth_header(deploy_token.username, deploy_token.token) }
+ context 'with deploy token headers' do
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) }
- context 'valid token' do
- it_behaves_like 'returning response status', :success
- end
+ context 'valid token' do
+ it_behaves_like 'returning response status', :success
+ end
- context 'invalid token' do
- let(:headers) { build_basic_auth_header('foo', 'bar') }
+ context 'invalid token' do
+ let(:headers) { basic_auth_header('foo', 'bar') }
- it_behaves_like 'returning response status', :success
- end
+ it_behaves_like 'returning response status', :success
end
-
- it_behaves_like 'rejects PyPI access with unknown project id'
end
+
+ it_behaves_like 'rejects PyPI access with unknown project id'
end
end
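
Both the NuGet and PyPI specs above drive their permission matrices through RSpec::Parameterized::TableSyntax. A minimal, self-contained illustration of that pattern (hypothetical example names, not taken from GitLab's suite) shows how each `where` row becomes one generated example group and how the cells are read through `params` when choosing a shared example per row:

require 'rspec-parameterized'

RSpec.describe 'permission matrix illustration' do
  using RSpec::Parameterized::TableSyntax

  shared_examples 'allows access' do
    it { expect(expected_status).to eq(:success) }
  end

  shared_examples 'denies access' do
    it { expect(expected_status).not_to eq(:success) }
  end

  where(:visibility, :member, :expected_status) do
    'PUBLIC'  | false | :success
    'PRIVATE' | false | :not_found
    'PRIVATE' | true  | :success
  end

  with_them do
    # Each table row generates one nested example group; the cells are exposed
    # as let-style methods (visibility, member, expected_status) inside
    # examples, and via the params hash at group-definition time, which is how
    # the specs above pass them to it_behaves_like.
    it_behaves_like(params[:expected_status] == :success ? 'allows access' : 'denies access')
  end
end
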
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index 5e8353d74c3..a9a92a4d3cd 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -420,7 +420,17 @@ RSpec.describe API::Releases do
{
name: 'New release',
tag_name: 'v0.1',
- description: 'Super nice release'
+ description: 'Super nice release',
+ assets: {
+ links: [
+ {
+ name: 'An example runbook link',
+ url: 'https://example.com/runbook',
+ link_type: 'runbook',
+ filepath: '/permanent/path/to/runbook'
+ }
+ ]
+ }
}
end
@@ -435,9 +445,17 @@ RSpec.describe API::Releases do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Release.count }.by(1)
- expect(project.releases.last.name).to eq('New release')
- expect(project.releases.last.tag).to eq('v0.1')
- expect(project.releases.last.description).to eq('Super nice release')
+ release = project.releases.last
+
+ aggregate_failures do
+ expect(release.name).to eq('New release')
+ expect(release.tag).to eq('v0.1')
+ expect(release.description).to eq('Super nice release')
+ expect(release.links.last.name).to eq('An example runbook link')
+ expect(release.links.last.url).to eq('https://example.com/runbook')
+ expect(release.links.last.link_type).to eq('runbook')
+ expect(release.links.last.filepath).to eq('/permanent/path/to/runbook')
+ end
end
it 'creates a new release without description' do
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index e676eb94337..4e2f6e108eb 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -84,8 +84,8 @@ RSpec.describe API::Snippets do
public_snippet.id,
public_snippet_other.id)
expect(json_response.map { |snippet| snippet['web_url']} ).to contain_exactly(
- "http://localhost/snippets/#{public_snippet.id}",
- "http://localhost/snippets/#{public_snippet_other.id}")
+ "http://localhost/-/snippets/#{public_snippet.id}",
+ "http://localhost/-/snippets/#{public_snippet_other.id}")
expect(json_response[0]['files'].first).to eq snippet_blob_file(public_snippet_other.blobs.first)
expect(json_response[1]['files'].first).to eq snippet_blob_file(public_snippet.blobs.first)
end
@@ -229,13 +229,16 @@ RSpec.describe API::Snippets do
let(:base_params) do
{
title: 'Test Title',
- file_name: 'test.rb',
description: 'test description',
- content: 'puts "hello world"',
visibility: 'public'
}
end
- let(:params) { base_params.merge(extra_params) }
+
+ let(:file_path) { 'file_1.rb' }
+ let(:file_content) { 'puts "hello world"' }
+
+ let(:params) { base_params.merge(file_params, extra_params) }
+ let(:file_params) { { files: [{ file_path: file_path, content: file_content }] } }
let(:extra_params) { {} }
subject { post api("/snippets/", user), params: params }
@@ -251,7 +254,7 @@ RSpec.describe API::Snippets do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['title']).to eq(params[:title])
expect(json_response['description']).to eq(params[:description])
- expect(json_response['file_name']).to eq(params[:file_name])
+ expect(json_response['file_name']).to eq(file_path)
expect(json_response['files']).to eq(snippet.blobs.map { |blob| snippet_blob_file(blob) })
expect(json_response['visibility']).to eq(params[:visibility])
end
@@ -265,12 +268,31 @@ RSpec.describe API::Snippets do
it 'commits the files to the repository' do
subject
- blob = snippet.repository.blob_at('master', params[:file_name])
+ blob = snippet.repository.blob_at('master', file_path)
- expect(blob.data).to eq params[:content]
+ expect(blob.data).to eq file_content
end
end
+ context 'with files parameter' do
+ it_behaves_like 'snippet creation with files parameter'
+
+ context 'with multiple files' do
+ let(:file_params) do
+ {
+ files: [
+ { file_path: 'file_1.rb', content: 'puts "hello world"' },
+ { file_path: 'file_2.rb', content: 'puts "hello world 2"' }
+ ]
+ }
+ end
+
+ it_behaves_like 'snippet creation'
+ end
+ end
+
+ it_behaves_like 'snippet creation without files parameter'
+
context 'with restricted visibility settings' do
before do
stub_application_setting(restricted_visibility_levels:
@@ -305,15 +327,6 @@ RSpec.describe API::Snippets do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it 'returns 400 if content is blank' do
- params[:content] = ''
-
- subject
-
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['error']).to eq 'content is empty'
- end
-
it 'returns 400 if title is blank' do
params[:title] = ''
diff --git a/spec/requests/api/suggestions_spec.rb b/spec/requests/api/suggestions_spec.rb
index 34d3c54d700..78a2688ac5e 100644
--- a/spec/requests/api/suggestions_spec.rb
+++ b/spec/requests/api/suggestions_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe API::Suggestions do
put api(url, user)
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'message' => 'A suggestion is not applicable.' })
+ expect(json_response).to eq({ 'message' => "Can't apply as this line was changed in a more recent version." })
end
end
@@ -133,7 +133,7 @@ RSpec.describe API::Suggestions do
params: { ids: [suggestion.id, unappliable_suggestion.id] }
expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response).to eq({ 'message' => 'A suggestion is not applicable.' })
+ expect(json_response).to eq({ 'message' => "Can't apply as this line was changed in a more recent version." })
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 17f9112c1d5..6c6497a240b 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -64,6 +64,7 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
expect(json_response).to have_key('note')
expect(json_response['note']).to eq(user.note)
+ expect(json_response).to have_key('sign_in_count')
end
end
@@ -72,6 +73,7 @@ RSpec.describe API::Users, :do_not_mock_admin_mode do
get api("/users/#{user.id}", user)
expect(json_response).not_to have_key('note')
+ expect(json_response).not_to have_key('sign_in_count')
end
end
end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index d387c6df4cf..5b6ffabb7ac 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -301,14 +301,14 @@ RSpec.describe 'Git HTTP requests' do
it 'rejects clones with 404 Not Found' do
download(path, user: user.username, password: user.password) do |response|
expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to eq(git_access_error(:project_not_found))
+ expect(response.body).to eq(git_access_wiki_error(:not_found))
end
end
it 'rejects pushes with 404 Not Found' do
upload(path, user: user.username, password: user.password) do |response|
expect(response).to have_gitlab_http_status(:not_found)
- expect(response.body).to eq(git_access_error(:project_not_found))
+ expect(response.body).to eq(git_access_wiki_error(:not_found))
end
end
end
diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb
index 2c4d97ec4c3..4afdde8be04 100644
--- a/spec/requests/groups/milestones_controller_spec.rb
+++ b/spec/requests/groups/milestones_controller_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Groups::MilestonesController do
let!(:public_project_with_private_issues_and_mrs) do
create(:project, :public, :issues_private, :merge_requests_private, group: public_group)
end
+
let!(:private_milestone) { create(:milestone, project: public_project_with_private_issues_and_mrs, title: 'project milestone') }
describe 'GET #index' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index 082857ab738..fd4261fb50d 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe 'Git LFS API and storage' do
'X-Sendfile-Type' => sendfile
}.compact
end
+
+ let(:include_workhorse_jwt_header) { true }
let(:authorization) { }
let(:sendfile) { }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
@@ -1076,14 +1078,24 @@ RSpec.describe 'Git LFS API and storage' do
end
end
- context 'invalid tempfiles' do
+ context 'without the lfs object' do
before do
lfs_object.destroy
end
it 'rejects slashes in the tempfile name (path traversal)' do
put_finalize('../bar', with_tempfile: true)
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ context 'not sending the workhorse jwt header' do
+ let(:include_workhorse_jwt_header) { false }
+
+ it 'rejects the request' do
+ put_finalize(with_tempfile: true)
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ end
end
end
end
@@ -1309,7 +1321,8 @@ RSpec.describe 'Git LFS API and storage' do
method: :put,
file_key: :file,
params: args.merge(file: uploaded_file),
- headers: finalize_headers
+ headers: finalize_headers,
+ send_rewritten_field: include_workhorse_jwt_header
)
end
diff --git a/spec/requests/product_analytics/collector_app_spec.rb b/spec/requests/product_analytics/collector_app_spec.rb
index 0491c2564f0..b87610841e7 100644
--- a/spec/requests/product_analytics/collector_app_spec.rb
+++ b/spec/requests/product_analytics/collector_app_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'ProductAnalytics::CollectorApp' do
let_it_be(:project) { create(:project) }
let(:params) { {} }
+ let(:raw_event) { Gitlab::Json.parse(fixture_file('product_analytics/event.json')) }
subject { get '/-/collector/i', params: params }
@@ -17,24 +18,7 @@ RSpec.describe 'ProductAnalytics::CollectorApp' do
end
context 'correct event params' do
- let(:params) do
- {
- aid: project.id,
- p: 'web',
- tna: 'sp',
- tv: 'js-2.14.0',
- eid: SecureRandom.uuid,
- duid: SecureRandom.uuid,
- sid: SecureRandom.uuid,
- vid: 4,
- url: 'http://example.com/products/1',
- refr: 'http://example.com/products/1',
- lang: 'en-US',
- cookie: true,
- tz: 'America/Los_Angeles',
- cs: 'UTF-8'
- }
- end
+ let(:params) { raw_event.merge(aid: project.id) }
it 'responds with 200' do
expect { subject }.to change { ProductAnalyticsEvent.count }.by(1)
diff --git a/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb b/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb
index c246aacb4c7..b18bffdb110 100644
--- a/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb
+++ b/spec/requests/projects/incident_management/pagerduty_incidents_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'PagerDuty webhook' do
def make_request
headers = { 'Content-Type' => 'application/json' }
- post project_incidents_pagerduty_url(project, token: 'VALID-TOKEN'), params: payload.to_json, headers: headers
+ post project_incidents_integrations_pagerduty_url(project, token: 'VALID-TOKEN'), params: payload.to_json, headers: headers
end
before do
diff --git a/spec/requests/projects/metrics/dashboards/builder_spec.rb b/spec/requests/projects/metrics/dashboards/builder_spec.rb
new file mode 100644
index 00000000000..e59ed591f63
--- /dev/null
+++ b/spec/requests/projects/metrics/dashboards/builder_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Projects::Metrics::Dashboards::BuilderController' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:valid_panel_yml) do
+ <<~YML
+ ---
+ title: "Super Chart A1"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ max_value: 1
+ metrics:
+ - id: metric_a1
+ query_range: |+
+ avg(
+ sum(
+ container_memory_usage_bytes{
+ container_name!="POD",
+ pod_name=~"^{{ci_environment_slug}}-(.*)",
+ namespace="{{kube_namespace}}",
+ user_def_variable="{{user_def_variable}}"
+ }
+ ) by (job)
+ ) without (job)
+ /1024/1024/1024
+ unit: unit
+ label: Legend Label
+ YML
+ end
+ let_it_be(:invalid_panel_yml) do
+ <<~YML
+ ---
+ title: "Super Chart A1"
+ type: "area-chart"
+ y_label: "y_label"
+ weight: 1
+ max_value: 1
+ YML
+ end
+
+ def send_request(params = {})
+ post namespace_project_metrics_dashboards_builder_path(namespace_id: project.namespace, project_id: project, format: :json, **params)
+ end
+
+ describe 'POST /:namespace/:project/-/metrics/dashboards/builder' do
+ context 'as anonymous user' do
+ it 'redirects user to sign in page' do
+ send_request
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'as user with guest access' do
+ before do
+ project.add_guest(user)
+ login_as(user)
+ end
+
+ it 'returns not found' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'as logged in user' do
+ before do
+ project.add_developer(user)
+ login_as(user)
+ end
+
+ context 'valid yaml panel is supplied' do
+ it 'returns success' do
+ send_request(panel_yaml: valid_panel_yml)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to include('title' => 'Super Chart A1', 'type' => 'area-chart')
+ end
+ end
+
+ context 'invalid yaml panel is supplied' do
+ it 'returns unprocessable entity' do
+ send_request(panel_yaml: invalid_panel_yml)
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('Each "panel" must define an array :metrics')
+ end
+ end
+
+ context 'when panel_yaml is not a valid yaml string' do
+ it 'returns unprocessable entity' do
+ send_request(panel_yaml: 1)
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('Invalid configuration format')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/projects/metrics_dashboard_spec.rb b/spec/requests/projects/metrics_dashboard_spec.rb
index ab35788387c..f571e4a4309 100644
--- a/spec/requests/projects/metrics_dashboard_spec.rb
+++ b/spec/requests/projects/metrics_dashboard_spec.rb
@@ -23,6 +23,22 @@ RSpec.describe 'metrics dashboard page' do
send_request
expect(assigns(:environment).id).to eq(environment.id)
end
+
+ context 'with anonymous user and public dashboard visibility' do
+ let(:anonymous_user) { create(:user) }
+ let(:project) do
+ create(:project, :public, metrics_dashboard_access_level: 'enabled')
+ end
+
+ before do
+ login_as(anonymous_user)
+ end
+
+ it 'returns 200' do
+ send_request
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
describe 'GET /:namespace/:project/-/metrics?environment=:environment.id' do
@@ -79,7 +95,27 @@ RSpec.describe 'metrics dashboard page' do
end
end
+ describe 'GET /:namespace/:project/-/metrics/:page' do
+ it 'returns 200 with path param page' do
+ # send_request(page: 'panel/new') cannot be used because it encodes '/'
+ get "#{dashboard_route}/panel/new"
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns 200 with dashboard and path param page' do
+ # send_request(page: 'panel/new') cannot be used because it encodes '/'
+ get "#{dashboard_route(dashboard_path: 'dashboard.yml')}/panel/new"
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
def send_request(params = {})
- get namespace_project_metrics_dashboard_path(namespace_id: project.namespace, project_id: project, **params)
+ get dashboard_route(params)
+ end
+
+ def dashboard_route(params = {})
+ namespace_project_metrics_dashboard_path(namespace_id: project.namespace, project_id: project, **params)
end
end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 106515a6b13..444ee478cbb 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -227,6 +227,7 @@ RSpec.describe 'Rack Attack global throttles' do
let(:protected_path_that_does_not_require_authentication) do
'/users/sign_in'
end
+
let(:post_params) { { user: { login: 'username', password: 'password' } } }
before do
diff --git a/spec/requests/search_controller_spec.rb b/spec/requests/search_controller_spec.rb
new file mode 100644
index 00000000000..52bfc480313
--- /dev/null
+++ b/spec/requests/search_controller_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SearchController, type: :request do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :public, :repository, :wiki_repo, name: 'awesome project', group: group) }
+
+ before_all do
+ login_as(user)
+ end
+
+ def send_search_request(params)
+ get search_path, params: params
+ end
+
+ shared_examples 'an efficient database result' do
+ it 'avoids N+1 database queries' do
+ create(object, *creation_traits, creation_args)
+
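+ # Record a baseline query count with a single record, then check that three more records do not push the count past the allowed threshold.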
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { send_search_request(params) }
+ create_list(object, 3, *creation_traits, creation_args)
+
+ expect { send_search_request(params) }.not_to exceed_all_query_limit(control).with_threshold(threshold)
+ end
+ end
+
+ describe 'GET /search' do
+ let(:creation_traits) { [] }
+
+ context 'for issues scope' do
+ let(:object) { :issue }
+ let(:creation_args) { { project: project } }
+ let(:params) { { search: '*', scope: 'issues' } }
+ let(:threshold) { 0 }
+
+ it_behaves_like 'an efficient database result'
+ end
+
+ context 'for merge_request scope' do
+ let(:creation_traits) { [:unique_branches] }
+ let(:object) { :merge_request }
+ let(:creation_args) { { source_project: project } }
+ let(:params) { { search: '*', scope: 'merge_requests' } }
+ let(:threshold) { 0 }
+
+ it_behaves_like 'an efficient database result'
+ end
+
+ context 'for project scope' do
+ let(:creation_traits) { [:public] }
+ let(:object) { :project }
+ let(:creation_args) { {} }
+ let(:params) { { search: '*', scope: 'projects' } }
+ # some N+1 queries still exist
+ # each project requires 3 extra queries
+ # - one count for forks
+ # - one count for open MRs
+ # - one count for open Issues
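+ # 3 additional projects * 3 counts each = 9 extra queries allowed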
+ let(:threshold) { 9 }
+
+ it_behaves_like 'an efficient database result'
+ end
+ end
+end
diff --git a/spec/routing/notifications_routing_spec.rb b/spec/routing/notifications_routing_spec.rb
index f545badcdfa..007e8ff4816 100644
--- a/spec/routing/notifications_routing_spec.rb
+++ b/spec/routing/notifications_routing_spec.rb
@@ -10,4 +10,9 @@ RSpec.describe "notifications routing" do
it "routes to #update" do
expect(put("/profile/notifications")).to route_to("profiles/notifications#update")
end
+
+ it 'routes to group #update' do
+ expect(put("/profile/notifications/groups/gitlab-org")).to route_to("profiles/groups#update", id: 'gitlab-org')
+ expect(put("/profile/notifications/groups/gitlab.org")).to route_to("profiles/groups#update", id: 'gitlab.org')
+ end
end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 87091da0c84..b80baf0aa13 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -314,39 +314,39 @@ RSpec.describe 'project routing' do
# DELETE /:project_id/snippets/:id(.:format) snippets#destroy
describe SnippetsController, 'routing' do
it 'to #raw' do
- expect(get('/gitlab/gitlabhq/snippets/1/raw')).to route_to('projects/snippets#raw', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(get('/gitlab/gitlabhq/-/snippets/1/raw')).to route_to('projects/snippets#raw', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it 'to #index' do
- expect(get('/gitlab/gitlabhq/snippets')).to route_to('projects/snippets#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/snippets')).to route_to('projects/snippets#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #create' do
- expect(post('/gitlab/gitlabhq/snippets')).to route_to('projects/snippets#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(post('/gitlab/gitlabhq/-/snippets')).to route_to('projects/snippets#create', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #new' do
- expect(get('/gitlab/gitlabhq/snippets/new')).to route_to('projects/snippets#new', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ expect(get('/gitlab/gitlabhq/-/snippets/new')).to route_to('projects/snippets#new', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
it 'to #edit' do
- expect(get('/gitlab/gitlabhq/snippets/1/edit')).to route_to('projects/snippets#edit', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(get('/gitlab/gitlabhq/-/snippets/1/edit')).to route_to('projects/snippets#edit', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it 'to #show' do
- expect(get('/gitlab/gitlabhq/snippets/1')).to route_to('projects/snippets#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(get('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it 'to #update' do
- expect(put('/gitlab/gitlabhq/snippets/1')).to route_to('projects/snippets#update', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(put('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#update', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it 'to #destroy' do
- expect(delete('/gitlab/gitlabhq/snippets/1')).to route_to('projects/snippets#destroy', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(delete('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#destroy', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
- it 'to #show from scope routing' do
- expect(get('/gitlab/gitlabhq/-/snippets/1')).to route_to('projects/snippets#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ it 'to #show from unscoped routing' do
+ expect(get('/gitlab/gitlabhq/snippets/1')).to route_to('projects/snippets#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
end
@@ -823,4 +823,66 @@ RSpec.describe 'project routing' do
project_id: 'gitlabhq', snippet_id: '1', ref: 'master', path: 'lib/version.rb')
end
end
+
+ describe Projects::MetricsDashboardController, 'routing' do
+ it 'routes to #show with no dashboard_path and no page' do
+ expect(get: "/gitlab/gitlabhq/-/metrics").to route_to(
+ "projects/metrics_dashboard#show",
+ **base_params
+ )
+ end
+
+ it 'routes to #show with only dashboard_path' do
+ expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1.yml").to route_to(
+ "projects/metrics_dashboard#show",
+ dashboard_path: 'dashboard1.yml',
+ **base_params
+ )
+ end
+
+ it 'routes to #show with only page' do
+ expect(get: "/gitlab/gitlabhq/-/metrics/panel/new").to route_to(
+ "projects/metrics_dashboard#show",
+ page: 'panel/new',
+ **base_params
+ )
+ end
+
+ it 'routes to #show with dashboard_path and page' do
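+ # %2F is the URL-encoded '/', so the dashboard_path segment decodes to 'config/prometheus/common_metrics.yml'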
+ expect(get: "/gitlab/gitlabhq/-/metrics/config%2Fprometheus%2Fcommon_metrics.yml/panel/new").to route_to(
+ "projects/metrics_dashboard#show",
+ dashboard_path: 'config/prometheus/common_metrics.yml',
+ page: 'panel/new',
+ **base_params
+ )
+ end
+
+ it 'routes to 404 with invalid page' do
+ expect(get: "/gitlab/gitlabhq/-/metrics/invalid_page").to route_to(
+ 'application#route_not_found',
+ unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_page'
+ )
+ end
+
+ it 'routes to 404 with invalid dashboard_path' do
+ expect(get: "/gitlab/gitlabhq/-/metrics/invalid_dashboard").to route_to(
+ 'application#route_not_found',
+ unmatched_route: 'gitlab/gitlabhq/-/metrics/invalid_dashboard'
+ )
+ end
+
+ it 'routes to 404 with invalid dashboard_path and valid page' do
+ expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1/panel/new").to route_to(
+ 'application#route_not_found',
+ unmatched_route: 'gitlab/gitlabhq/-/metrics/dashboard1/panel/new'
+ )
+ end
+
+ it 'routes to 404 with valid dashboard_path and invalid page' do
+ expect(get: "/gitlab/gitlabhq/-/metrics/dashboard1.yml/invalid_page").to route_to(
+ 'application#route_not_found',
+ unmatched_route: 'gitlab/gitlabhq/-/metrics/dashboard1.yml/invalid_page'
+ )
+ end
+ end
end
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 1218ae30781..af4becd980b 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -76,46 +76,45 @@ end
# DELETE /snippets/:id(.:format) snippets#destroy
RSpec.describe SnippetsController, "routing" do
it "to #raw" do
- expect(get("/snippets/1/raw")).to route_to('snippets#raw', id: '1')
+ expect(get("/-/snippets/1/raw")).to route_to('snippets#raw', id: '1')
end
it "to #index" do
- expect(get("/snippets")).to route_to('snippets#index')
+ expect(get("/-/snippets")).to route_to('snippets#index')
end
it "to #create" do
- expect(post("/snippets")).to route_to('snippets#create')
+ expect(post("/-/snippets")).to route_to('snippets#create')
end
it "to #new" do
- expect(get("/snippets/new")).to route_to('snippets#new')
+ expect(get("/-/snippets/new")).to route_to('snippets#new')
end
it "to #edit" do
- expect(get("/snippets/1/edit")).to route_to('snippets#edit', id: '1')
+ expect(get("/-/snippets/1/edit")).to route_to('snippets#edit', id: '1')
end
it "to #show" do
- expect(get("/snippets/1")).to route_to('snippets#show', id: '1')
+ expect(get("/-/snippets/1")).to route_to('snippets#show', id: '1')
end
it "to #update" do
- expect(put("/snippets/1")).to route_to('snippets#update', id: '1')
+ expect(put("/-/snippets/1")).to route_to('snippets#update', id: '1')
end
it "to #destroy" do
- expect(delete("/snippets/1")).to route_to('snippets#destroy', id: '1')
+ expect(delete("/-/snippets/1")).to route_to('snippets#destroy', id: '1')
end
- it 'to #show from scope routing' do
- expect(get("/-/snippets/1")).to route_to('snippets#show', id: '1')
+ it 'to #show from unscoped routing' do
+ expect(get("/snippets/1")).to route_to('snippets#show', id: '1')
end
end
# help GET /help(.:format) help#index
# help_page GET /help/*path(.:format) help#show
# help_shortcuts GET /help/shortcuts(.:format) help#shortcuts
-# help_ui GET /help/ui(.:format) help#ui
RSpec.describe HelpController, "routing" do
it "to #index" do
expect(get("/help")).to route_to('help#index')
@@ -131,9 +130,6 @@ RSpec.describe HelpController, "routing" do
expect(get(path)).to route_to('help#show',
path: 'workflow/protected_branches/protected_branches1',
format: 'png')
-
- path = '/help/ui'
- expect(get(path)).to route_to('help#ui')
end
end
diff --git a/spec/rubocop/cop/avoid_becomes_spec.rb b/spec/rubocop/cop/avoid_becomes_spec.rb
new file mode 100644
index 00000000000..3e3e3abc27d
--- /dev/null
+++ b/spec/rubocop/cop/avoid_becomes_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../rubocop/cop/avoid_becomes'
+
+RSpec.describe RuboCop::Cop::AvoidBecomes, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ it 'flags the use of becomes with a constant parameter' do
+ inspect_source('foo.becomes(Project)')
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'flags the use of becomes with a namespaced constant parameter' do
+ inspect_source('foo.becomes(Namespace::Group)')
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'flags the use of becomes with a dynamic parameter' do
+ inspect_source(<<~RUBY)
+ model = Namespace
+ project = Project.first
+ project.becomes(model)
+ RUBY
+
+ expect(cop.offenses.size).to eq(1)
+ end
+end
diff --git a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
index bc9db9cafec..4fb47e758bb 100644
--- a/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
+++ b/spec/rubocop/cop/avoid_break_from_strong_memoize_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/avoid_break_from_strong_memoize'
-RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize do
+RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize, type: :rubocop do
include CopHelper
subject(:cop) { described_class.new }
@@ -62,7 +61,7 @@ RSpec.describe RuboCop::Cop::AvoidBreakFromStrongMemoize do
end
end
RUBY
- expect_next_instance_of(described_class) do |instance|
+ expect_any_instance_of(described_class) do |instance|
expect(instance).to receive(:add_offense).once
end
diff --git a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
index 9e571bf96b9..a157183646c 100644
--- a/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
+++ b/spec/rubocop/cop/avoid_return_from_blocks_spec.rb
@@ -1,11 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/avoid_return_from_blocks'
-RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks do
+RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks, type: :rubocop do
include CopHelper
subject(:cop) { described_class.new }
@@ -29,7 +28,7 @@ RSpec.describe RuboCop::Cop::AvoidReturnFromBlocks do
end
end
RUBY
- expect_next_instance_of(described_class) do |instance|
+ expect_any_instance_of(described_class) do |instance|
expect(instance).to receive(:add_offense).once
end
diff --git a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
index 30ee422f420..a47625d5dc1 100644
--- a/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
+++ b/spec/rubocop/cop/gitlab/finder_with_find_by_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe RuboCop::Cop::Gitlab::FinderWithFindBy, type: :rubocop do
.find_by!(1)
SRC
end
+
let(:corrected_source) do
<<~SRC
DummyFinder.new(some_args)
diff --git a/spec/rubocop/cop/graphql/json_type_spec.rb b/spec/rubocop/cop/graphql/json_type_spec.rb
new file mode 100644
index 00000000000..ac25e0feb69
--- /dev/null
+++ b/spec/rubocop/cop/graphql/json_type_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require_relative '../../../../rubocop/cop/graphql/json_type'
+
+RSpec.describe RuboCop::Cop::Graphql::JSONType, type: :rubocop do
+ include CopHelper
+
+ subject(:cop) { described_class.new }
+
+ context 'fields' do
+ it 'adds an offense when GraphQL::Types::JSON is used' do
+ inspect_source(<<~RUBY.strip)
+ class MyType
+ field :some_field, GraphQL::Types::JSON
+ end
+ RUBY
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'adds an offense when GraphQL::Types::JSON is used with other keywords' do
+ inspect_source(<<~RUBY.strip)
+ class MyType
+ field :some_field, GraphQL::Types::JSON, null: true, description: 'My description'
+ end
+ RUBY
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'does not add an offense for other types' do
+ expect_no_offenses(<<~RUBY.strip)
+ class MyType
+ field :some_field, GraphQL::STRING_TYPE
+ end
+ RUBY
+ end
+ end
+
+ context 'arguments' do
+ it 'adds an offense when GraphQL::Types::JSON is used' do
+ inspect_source(<<~RUBY.strip)
+ class MyType
+ argument :some_arg, GraphQL::Types::JSON
+ end
+ RUBY
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'adds an offense when GraphQL::Types::JSON is used with other keywords' do
+ inspect_source(<<~RUBY.strip)
+ class MyType
+ argument :some_arg, GraphQL::Types::JSON, null: true, description: 'My description'
+ end
+ RUBY
+
+ expect(cop.offenses.size).to eq(1)
+ end
+
+ it 'does not add an offense for other types' do
+ expect_no_offenses(<<~RUBY.strip)
+ class MyType
+ argument :some_arg, GraphQL::STRING_TYPE
+ end
+ RUBY
+ end
+ end
+
+ it 'does not add an offense for uses outside of field or argument' do
+ expect_no_offenses(<<~RUBY.strip)
+ class MyType
+ foo :some_field, GraphQL::Types::JSON
+ end
+ RUBY
+ end
+end
diff --git a/spec/rubocop/cop/migration/drop_table_spec.rb b/spec/rubocop/cop/migration/drop_table_spec.rb
index 44a1106ba62..9ce5ee45b08 100644
--- a/spec/rubocop/cop/migration/drop_table_spec.rb
+++ b/spec/rubocop/cop/migration/drop_table_spec.rb
@@ -1,13 +1,10 @@
# frozen_string_literal: true
-require 'spec_helper'
-
+require 'fast_spec_helper'
require 'rubocop'
-require 'rubocop/rspec/support'
-
require_relative '../../../../rubocop/cop/migration/drop_table'
-RSpec.describe RuboCop::Cop::Migration::DropTable do
+RSpec.describe RuboCop::Cop::Migration::DropTable, type: :rubocop do
include CopHelper
subject(:cop) { described_class.new }
diff --git a/spec/rubocop/cop/put_group_routes_under_scope_spec.rb b/spec/rubocop/cop/put_group_routes_under_scope_spec.rb
index 2e577c9c578..c55d9bf22d6 100644
--- a/spec/rubocop/cop/put_group_routes_under_scope_spec.rb
+++ b/spec/rubocop/cop/put_group_routes_under_scope_spec.rb
@@ -9,19 +9,20 @@ RSpec.describe RuboCop::Cop::PutGroupRoutesUnderScope, type: :rubocop do
subject(:cop) { described_class.new }
- before do
- allow(cop).to receive(:in_group_routes?).and_return(true)
- end
+ %w[resource resources get post put patch delete].each do |route_method|
+ it "registers an offense when route is outside scope for `#{route_method}`" do
+ offense = "#{route_method} :notes"
+ marker = '^' * offense.size
- it 'registers an offense when route is outside scope' do
- expect_offense(<<~PATTERN)
+ expect_offense(<<~PATTERN)
scope(path: 'groups/*group_id/-', module: :groups) do
resource :issues
end
- resource :notes
- ^^^^^^^^^^^^^^^ Put new group routes under /-/ scope
- PATTERN
+ #{offense}
+ #{marker} Put new group routes under /-/ scope
+ PATTERN
+ end
end
it 'does not register an offense when resource inside the scope' do
diff --git a/spec/rubocop/cop/put_project_routes_under_scope_spec.rb b/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
index 66e9044c453..05e1cd7b693 100644
--- a/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
+++ b/spec/rubocop/cop/put_project_routes_under_scope_spec.rb
@@ -9,19 +9,20 @@ RSpec.describe RuboCop::Cop::PutProjectRoutesUnderScope, type: :rubocop do
subject(:cop) { described_class.new }
- before do
- allow(cop).to receive(:in_project_routes?).and_return(true)
- end
+ %w[resource resources get post put patch delete].each do |route_method|
+ it "registers an offense when route is outside scope for `#{route_method}`" do
+ offense = "#{route_method} :notes"
+ marker = '^' * offense.size
- it 'registers an offense when route is outside scope' do
- expect_offense(<<~PATTERN)
+ expect_offense(<<~PATTERN)
scope '-' do
resource :issues
end
- resource :notes
- ^^^^^^^^^^^^^^^ Put new project routes under /-/ scope
- PATTERN
+ #{offense}
+ #{marker} Put new project routes under /-/ scope
+ PATTERN
+ end
end
it 'does not register an offense when resource inside the scope' do
diff --git a/spec/rubocop/cop/rspec/any_instance_of_spec.rb b/spec/rubocop/cop/rspec/any_instance_of_spec.rb
index 11c0f109850..971e28853a3 100644
--- a/spec/rubocop/cop/rspec/any_instance_of_spec.rb
+++ b/spec/rubocop/cop/rspec/any_instance_of_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe RuboCop::Cop::RSpec::AnyInstanceOf, type: :rubocop do
allow_any_instance_of(User).to receive(:invalidate_issue_cache_counts)
SRC
end
+
let(:corrected_source) do
<<~SRC
allow_next_instance_of(User) do |instance|
@@ -40,6 +41,7 @@ RSpec.describe RuboCop::Cop::RSpec::AnyInstanceOf, type: :rubocop do
expect_any_instance_of(User).to receive(:invalidate_issue_cache_counts).with(args).and_return(double)
SRC
end
+
let(:corrected_source) do
<<~SRC
expect_next_instance_of(User) do |instance|
diff --git a/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb b/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
new file mode 100644
index 00000000000..db931c50bdf
--- /dev/null
+++ b/spec/rubocop/cop/usage_data/distinct_count_by_large_foreign_key_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/usage_data/distinct_count_by_large_foreign_key'
+
+RSpec.describe RuboCop::Cop::UsageData::DistinctCountByLargeForeignKey, type: :rubocop do
+ include CopHelper
+
+ let(:allowed_foreign_keys) { %i[author_id user_id] }
+
+ let(:config) do
+ RuboCop::Config.new('UsageData/DistinctCountByLargeForeignKey' => {
+ 'AllowedForeignKeys' => allowed_foreign_keys
+ })
+ end
+
+ subject(:cop) { described_class.new(config) }
+
+ context 'when counting by disallowed key' do
+ it 'registers an offence' do
+ inspect_source('distinct_count(Issue, :creator_id)')
+
+ expect(cop.offenses.size).to eq(1)
+ end
+ end
+
+ context 'when calling by allowed key' do
+ it 'does not register an offence' do
+ inspect_source('distinct_count(Issue, :author_id)')
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+end
diff --git a/spec/rubocop/cop/usage_data/large_table_spec.rb b/spec/rubocop/cop/usage_data/large_table_spec.rb
new file mode 100644
index 00000000000..de6fb9c17e2
--- /dev/null
+++ b/spec/rubocop/cop/usage_data/large_table_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/usage_data/large_table'
+
+RSpec.describe RuboCop::Cop::UsageData::LargeTable, type: :rubocop do
+ include CopHelper
+
+ let(:large_tables) { %i[Rails Time] }
+ let(:count_methods) { %i[count distinct_count] }
+ let(:allowed_methods) { %i[minimum maximum] }
+
+ let(:config) do
+ RuboCop::Config.new('UsageData/LargeTable' => {
+ 'NonRelatedClasses' => large_tables,
+ 'CountMethods' => count_methods,
+ 'AllowedMethods' => allowed_methods
+ })
+ end
+
+ subject(:cop) { described_class.new(config) }
+
+ context 'when in usage_data files' do
+ before do
+ allow(cop).to receive(:usage_data_files?).and_return(true)
+ end
+
+ context 'with large tables' do
+ context 'when calling Issue.count' do
+ it 'registers an offence' do
+ inspect_source('Issue.count')
+
+ expect(cop.offenses.size).to eq(1)
+ end
+ end
+
+ context 'when calling Issue.active.count' do
+ it 'registers an offence' do
+ inspect_source('Issue.active.count')
+
+ expect(cop.offenses.size).to eq(1)
+ end
+ end
+
+ context 'when calling count(Issue)' do
+ it 'does not register an offence' do
+ inspect_source('count(Issue)')
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ context 'when calling count(Ci::Build.active)' do
+ it 'does not register an offence' do
+ inspect_source('count(Ci::Build.active)')
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+
+ context 'when calling Ci::Build.active.count' do
+ it 'registers an offence' do
+ inspect_source('Ci::Build.active.count')
+
+ expect(cop.offenses.size).to eq(1)
+ end
+ end
+
+ context 'when using allowed methods' do
+ it 'does not register an offence' do
+ inspect_source('Issue.minimum')
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+ end
+
+ context 'with non related class' do
+ it 'does not register an offence' do
+ inspect_source('Rails.count')
+
+ expect(cop.offenses).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/serializers/accessibility_reports_comparer_entity_spec.rb b/spec/serializers/accessibility_reports_comparer_entity_spec.rb
index 3024974710e..c576dfa4dd1 100644
--- a/spec/serializers/accessibility_reports_comparer_entity_spec.rb
+++ b/spec/serializers/accessibility_reports_comparer_entity_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe AccessibilityReportsComparerEntity do
}
]
end
+
let(:different_error) do
[
{
diff --git a/spec/serializers/accessibility_reports_comparer_serializer_spec.rb b/spec/serializers/accessibility_reports_comparer_serializer_spec.rb
index ef56f5b6b6b..94d2a2a4065 100644
--- a/spec/serializers/accessibility_reports_comparer_serializer_spec.rb
+++ b/spec/serializers/accessibility_reports_comparer_serializer_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe AccessibilityReportsComparerSerializer do
}
]
end
+
let(:different_error) do
[
{
diff --git a/spec/serializers/analytics_issue_entity_spec.rb b/spec/serializers/analytics_issue_entity_spec.rb
index 2518eec8c23..447c5e7d02a 100644
--- a/spec/serializers/analytics_issue_entity_spec.rb
+++ b/spec/serializers/analytics_issue_entity_spec.rb
@@ -17,16 +17,13 @@ RSpec.describe AnalyticsIssueEntity do
}
end
- let(:project) { create(:project, name: 'my project') }
let(:request) { EntityRequest.new(entity: :merge_request) }
let(:entity) do
described_class.new(entity_hash, request: request, project: project)
end
- context 'generic entity' do
- subject { entity.as_json }
-
+ shared_examples 'generic entity' do
it 'contains the entity URL' do
expect(subject).to include(:url)
end
@@ -40,4 +37,24 @@ RSpec.describe AnalyticsIssueEntity do
expect(subject).not_to include(/variables/)
end
end
+
+ context 'without subgroup' do
+ let_it_be(:project) { create(:project, name: 'my project') }
+
+ subject { entity.as_json }
+
+ it_behaves_like 'generic entity'
+ end
+
+ context 'with subgroup' do
+ let_it_be(:project) { create(:project, :in_subgroup, name: 'my project') }
+
+ subject { entity.as_json }
+
+ it_behaves_like 'generic entity'
+
+ it 'has URL containing subgroup' do
+ expect(subject[:url]).to include("#{project.group.parent.name}/#{project.group.name}/my_project/")
+ end
+ end
end
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index ef6472e07a0..3166c08ff4e 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -185,12 +185,38 @@ RSpec.describe BuildDetailsEntity do
end
end
+ context 'when the build has expired artifacts' do
+ let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.ago) }
+
+ it 'does not expose any artifact actions path' do
+ expect(subject[:artifact].keys).not_to include(:download_path, :browse_path, :keep_path)
+ end
+
+ it 'exposes artifact locked as false' do
+ expect(subject.dig(:artifact, :locked)).to eq(false)
+ end
+
+ context 'when the pipeline is artifacts_locked' do
+ before do
+ build.pipeline.update!(locked: :artifacts_locked)
+ end
+
+ it 'exposes artifact locked as true' do
+ expect(subject.dig(:artifact, :locked)).to eq(true)
+ end
+
+ it 'exposes download and browse artifact actions path' do
+ expect(subject[:artifact].keys).to include(:download_path, :browse_path)
+ end
+ end
+ end
+
context 'when the build has archive type artifacts' do
let!(:build) { create(:ci_build, :artifacts, artifacts_expire_at: 7.days.from_now) }
let!(:report) { create(:ci_job_artifact, :codequality, job: build) }
it 'exposes artifact details' do
- expect(subject[:artifact].keys).to include(:download_path, :browse_path, :keep_path, :expire_at, :expired)
+ expect(subject[:artifact].keys).to include(:download_path, :browse_path, :keep_path, :expire_at, :expired, :locked)
end
end
end
diff --git a/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb b/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb
index 69bf599c0dd..ddeeb367afe 100644
--- a/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb
+++ b/spec/serializers/ci/daily_build_group_report_result_serializer_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Ci::DailyBuildGroupReportResultSerializer do
double(date: '2020-05-19', group_name: 'karma', data: { 'coverage' => 89.1 })
]
end
+
let(:serializer) { described_class.new.represent(report_result, param_type: 'coverage') }
describe '#to_json' do
diff --git a/spec/serializers/cluster_error_entity_spec.rb b/spec/serializers/cluster_error_entity_spec.rb
new file mode 100644
index 00000000000..43ec41adf14
--- /dev/null
+++ b/spec/serializers/cluster_error_entity_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ClusterErrorEntity do
+ describe '#as_json' do
+ let(:cluster) { create(:cluster, :provided_by_user, :group) }
+
+ subject { described_class.new(cluster).as_json }
+
+ context 'when connection_error is present' do
+ before do
+ allow(cluster).to receive(:connection_error).and_return(:connection_error)
+ end
+
+ it { is_expected.to eq({ connection_error: :connection_error, metrics_connection_error: nil, node_connection_error: nil }) }
+ end
+
+ context 'when metrics_connection_error is present' do
+ before do
+ allow(cluster).to receive(:metrics_connection_error).and_return(:http_error)
+ end
+
+ it { is_expected.to eq({ connection_error: nil, metrics_connection_error: :http_error, node_connection_error: nil }) }
+ end
+
+ context 'when node_connection_error is present' do
+ before do
+ allow(cluster).to receive(:node_connection_error).and_return(:unknown_error)
+ end
+
+ it { is_expected.to eq({ connection_error: nil, metrics_connection_error: nil, node_connection_error: :unknown_error }) }
+ end
+ end
+end
diff --git a/spec/serializers/cluster_serializer_spec.rb b/spec/serializers/cluster_serializer_spec.rb
index ea1cf6ff59a..f34409c3cfb 100644
--- a/spec/serializers/cluster_serializer_spec.rb
+++ b/spec/serializers/cluster_serializer_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe ClusterSerializer do
:enabled,
:environment_scope,
:gitlab_managed_apps_logs_path,
+ :kubernetes_errors,
:name,
:nodes,
:path,
diff --git a/spec/serializers/commit_entity_spec.rb b/spec/serializers/commit_entity_spec.rb
index e2ea63893a4..6b4346faf5b 100644
--- a/spec/serializers/commit_entity_spec.rb
+++ b/spec/serializers/commit_entity_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe CommitEntity do
let(:entity) do
described_class.new(commit, request: request)
end
+
let(:request) { double('request') }
let(:project) { create(:project, :repository) }
let(:commit) { project.commit }
diff --git a/spec/serializers/diff_file_base_entity_spec.rb b/spec/serializers/diff_file_base_entity_spec.rb
index 153f854aa58..bf69a50a072 100644
--- a/spec/serializers/diff_file_base_entity_spec.rb
+++ b/spec/serializers/diff_file_base_entity_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe DiffFileBaseEntity do
let(:commit_sha_with_changed_submodule) do
"cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
end
+
let(:commit) { project.commit(commit_sha_with_changed_submodule) }
let(:options) { { request: {}, submodule_links: Gitlab::SubmoduleLinks.new(repository) } }
let(:diff_file) { commit.diffs.diff_files.to_a.last }
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index 8ed47569b75..e8cbc2076d7 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe DiffsMetadataEntity do
:email_patch_path, :plain_diff_path,
:merge_request_diffs, :context_commits,
:definition_path_prefix, :source_branch_exists,
+ :can_merge, :conflict_resolution_path, :has_conflicts,
# Attributes
:diff_files
)
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index 6232a0d2973..c969638614e 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -82,10 +82,30 @@ RSpec.describe EnvironmentEntity do
end
end
+ context 'with alert' do
+ let!(:environment) { create(:environment, project: project) }
+ let!(:prometheus_alert) { create(:prometheus_alert, project: project, environment: environment) }
+ let!(:alert) { create(:alert_management_alert, :triggered, :prometheus, project: project, environment: environment, prometheus_alert: prometheus_alert) }
+
+ it 'exposes active alert flag' do
+ project.add_maintainer(user)
+
+ expect(subject[:has_opened_alert]).to eq(true)
+ end
+
+ context 'when user does not have permission to read alert' do
+ it 'does not expose active alert flag' do
+ project.add_reporter(user)
+
+ expect(subject[:has_opened_alert]).to be_nil
+ end
+ end
+ end
+
context 'pod_logs' do
- context 'with developer access' do
+ context 'with reporter access' do
before do
- project.add_developer(user)
+ project.add_reporter(user)
end
it 'does not expose logs keys' do
@@ -95,9 +115,9 @@ RSpec.describe EnvironmentEntity do
end
end
- context 'with maintainer access' do
+ context 'with developer access' do
before do
- project.add_maintainer(user)
+ project.add_developer(user)
end
it 'exposes logs keys' do
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index 1e3980b7720..1eba9ae4e5e 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe EnvironmentSerializer do
project: project,
sha: project.commit.id)
end
+
let(:resource) { deployment.environment }
before do
diff --git a/spec/serializers/group_deploy_key_entity_spec.rb b/spec/serializers/group_deploy_key_entity_spec.rb
new file mode 100644
index 00000000000..e6cef2f10b3
--- /dev/null
+++ b/spec/serializers/group_deploy_key_entity_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupDeployKeyEntity do
+ include RequestAwareEntity
+
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:group_deploy_key) { create(:group_deploy_key) }
+ let(:options) { { user: user } }
+
+ let(:entity) { described_class.new(group_deploy_key, options) }
+
+ before do
+ group.group_deploy_keys << group_deploy_key
+ end
+
+ describe 'returns group deploy keys with a group the user can read' do
+ let(:expected_result) do
+ {
+ id: group_deploy_key.id,
+ user_id: group_deploy_key.user_id,
+ title: group_deploy_key.title,
+ fingerprint: group_deploy_key.fingerprint,
+ fingerprint_sha256: group_deploy_key.fingerprint_sha256,
+ created_at: group_deploy_key.created_at,
+ updated_at: group_deploy_key.updated_at,
+ can_edit: false,
+ group_deploy_keys_groups: [
+ {
+ can_push: false,
+ group:
+ {
+ id: group.id,
+ name: group.name,
+ full_path: group.full_path,
+ full_name: group.full_name
+ }
+ }
+ ]
+ }
+ end
+
+ it { expect(entity.as_json).to eq(expected_result) }
+ end
+end
diff --git a/spec/serializers/import/bitbucket_provider_repo_entity_spec.rb b/spec/serializers/import/bitbucket_provider_repo_entity_spec.rb
index 2a0386b779f..d59a476961e 100644
--- a/spec/serializers/import/bitbucket_provider_repo_entity_spec.rb
+++ b/spec/serializers/import/bitbucket_provider_repo_entity_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Import::BitbucketProviderRepoEntity do
}
}
end
+
let(:repo) { Bitbucket::Representation::Repo.new(repo_data) }
subject { described_class.new(repo).as_json }
diff --git a/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb b/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb
index 6e00d608d9a..894bf6ac770 100644
--- a/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb
+++ b/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb
@@ -6,8 +6,10 @@ RSpec.describe Import::BitbucketServerProviderRepoEntity do
let(:repo_data) do
{
'name' => 'test',
+ 'slug' => 'TEST',
'project' => {
- 'name' => 'demo'
+ 'name' => 'demo',
+ 'key' => 'DEM'
},
'links' => {
'self' => [
@@ -19,6 +21,7 @@ RSpec.describe Import::BitbucketServerProviderRepoEntity do
}
}
end
+
let(:repo) { BitbucketServer::Representation::Repo.new(repo_data) }
subject { described_class.new(repo).as_json }
@@ -26,7 +29,7 @@ RSpec.describe Import::BitbucketServerProviderRepoEntity do
it_behaves_like 'exposes required fields for import entity' do
let(:expected_values) do
{
- id: 'demo/test',
+ id: 'DEM/TEST',
full_name: 'demo/test',
sanitized_name: 'test',
provider_link: 'http://local.bitbucket.server/demo/test.git'
diff --git a/spec/serializers/import/fogbugz_provider_repo_entity_spec.rb b/spec/serializers/import/fogbugz_provider_repo_entity_spec.rb
index 748ddd2a108..62bb0addb87 100644
--- a/spec/serializers/import/fogbugz_provider_repo_entity_spec.rb
+++ b/spec/serializers/import/fogbugz_provider_repo_entity_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Import::FogbugzProviderRepoEntity do
'sProject' => 'demo'
}
end
+
let(:repo) { Gitlab::FogbugzImport::Repository.new(repo_data) }
subject { described_class.represent(repo, { provider_url: provider_url }).as_json }
diff --git a/spec/serializers/import/manifest_provider_repo_entity_spec.rb b/spec/serializers/import/manifest_provider_repo_entity_spec.rb
new file mode 100644
index 00000000000..c11f8c42559
--- /dev/null
+++ b/spec/serializers/import/manifest_provider_repo_entity_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::ManifestProviderRepoEntity do
+ let(:current_user) { create(:user) }
+ let(:request) { double(:request, current_user: current_user) }
+ let(:repo_data) do
+ {
+ id: 1,
+ url: 'http://demo.repo/url',
+ path: '/demo/path'
+ }
+ end
+
+ subject { described_class.represent(repo_data, { group_full_path: 'group', request: request }).as_json }
+
+ it_behaves_like 'exposes required fields for import entity' do
+ let(:expected_values) do
+ {
+ id: repo_data[:id],
+ full_name: repo_data[:url],
+ sanitized_name: nil,
+ provider_link: repo_data[:url]
+ }
+ end
+ end
+end
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index f0493699209..e5f88e31025 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -36,13 +36,28 @@ RSpec.describe MergeRequestPollWidgetEntity do
it 'returns merge_pipeline' do
pipeline.reload
- pipeline_payload = PipelineDetailsEntity
- .represent(pipeline, request: request)
- .as_json
+ pipeline_payload =
+ MergeRequests::PipelineEntity
+ .represent(pipeline, request: request)
+ .as_json
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
+ context 'when merge_request_short_pipeline_serializer is disabled' do
+ it 'returns detailed info about pipeline' do
+ stub_feature_flags(merge_request_short_pipeline_serializer: false)
+
+ pipeline.reload
+ pipeline_payload =
+ PipelineDetailsEntity
+ .represent(pipeline, request: request)
+ .as_json
+
+ expect(subject[:merge_pipeline]).to eq(pipeline_payload)
+ end
+ end
+
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
@@ -222,13 +237,27 @@ RSpec.describe MergeRequestPollWidgetEntity do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
- pipeline_payload = PipelineDetailsEntity
- .represent(pipeline, request: req)
- .as_json
+ pipeline_payload =
+ MergeRequests::PipelineEntity
+ .represent(pipeline, request: req)
+ .as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
+ context 'when merge_request_short_pipeline_serializer is disabled' do
+ it 'returns detailed info about pipeline' do
+ stub_feature_flags(merge_request_short_pipeline_serializer: false)
+
+ pipeline_payload =
+ PipelineDetailsEntity
+ .represent(pipeline, request: req)
+ .as_json
+
+ expect(subject[:pipeline]).to eq(pipeline_payload)
+ end
+ end
+
it 'returns ci_status' do
expect(subject[:ci_status]).to eq('pending')
end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index aaee47fb981..1704208d8b9 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -31,6 +31,28 @@ RSpec.describe MergeRequestWidgetEntity do
end
end
+ describe 'can_create_pipeline_in_target_project' do
+ context 'when user has permission' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'includes the correct permission info' do
+ expect(subject[:can_create_pipeline_in_target_project]).to eq(true)
+ end
+ end
+
+ context 'when user does not have permission' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'includes the correct permission info' do
+ expect(subject[:can_create_pipeline_in_target_project]).to eq(false)
+ end
+ end
+ end
+
describe 'issues links' do
it 'includes issues links when requested' do
data = described_class.new(resource, request: request, issues_links: true).as_json
@@ -234,6 +256,62 @@ RSpec.describe MergeRequestWidgetEntity do
end
end
+ describe 'user callouts' do
+ context 'when suggest pipeline feature is enabled' do
+ before do
+ stub_feature_flags(suggest_pipeline: true)
+ end
+
+ it 'provides a valid path value for user callout path' do
+ expect(subject[:user_callouts_path]).to eq '/-/user_callouts'
+ end
+
+ it 'provides a valid value for suggest pipeline feature id' do
+ expect(subject[:suggest_pipeline_feature_id]).to eq described_class::SUGGEST_PIPELINE
+ end
+
+ it 'provides a valid value for whether it is dismissed' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be(false)
+ end
+
+ context 'when the suggest pipeline has been dismissed' do
+ before do
+ create(:user_callout, user: user, feature_name: described_class::SUGGEST_PIPELINE)
+ end
+
+ it 'indicates suggest pipeline has been dismissed' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be(true)
+ end
+ end
+
+ context 'when user is not logged in' do
+ let(:request) { double('request', current_user: nil, project: project) }
+
+ it 'returns a blank value for whether it is dismissed' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be_nil
+ end
+ end
+ end
+
+ context 'when suggest pipeline feature is not enabled' do
+ before do
+ stub_feature_flags(suggest_pipeline: false)
+ end
+
+ it 'provides no valid value for user callout path' do
+ expect(subject[:user_callouts_path]).to be_nil
+ end
+
+ it 'provides no valid value for suggest pipeline feature id' do
+ expect(subject[:suggest_pipeline_feature_id]).to be_nil
+ end
+
+ it 'provides no valid value for whether it is dismissed' do
+ expect(subject[:is_dismissed_suggest_pipeline]).to be_nil
+ end
+ end
+ end
+
it 'has human access' do
project.add_maintainer(user)
diff --git a/spec/serializers/merge_requests/pipeline_entity_spec.rb b/spec/serializers/merge_requests/pipeline_entity_spec.rb
new file mode 100644
index 00000000000..8685d4bb846
--- /dev/null
+++ b/spec/serializers/merge_requests/pipeline_entity_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::PipelineEntity do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let(:request) { double('request') }
+
+ before do
+ stub_not_protect_default_branch
+
+ allow(request).to receive(:current_user).and_return(user)
+ allow(request).to receive(:project).and_return(project)
+ end
+
+ let(:entity) do
+ described_class.represent(pipeline, request: request)
+ end
+
+ subject { entity.as_json }
+
+ describe '#as_json' do
+ it 'contains required fields' do
+ is_expected.to include(
+ :id, :path, :active, :coverage, :ref, :commit, :details,
+ :flags, :triggered, :triggered_by
+ )
+ expect(subject[:commit]).to include(:short_id, :commit_path)
+ expect(subject[:ref]).to include(:branch)
+ expect(subject[:details]).to include(:name, :status, :stages)
+ expect(subject[:details][:status]).to include(:icon, :favicon, :text, :label, :tooltip)
+ expect(subject[:flags]).to include(:merge_request_pipeline)
+ end
+
+ it 'excludes coverage data when disabled' do
+ entity = described_class
+ .represent(pipeline, request: request, disable_coverage: true)
+
+ expect(entity.as_json).not_to include(:coverage)
+ end
+ end
+end
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index 30360b00537..a2c58baed55 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe PaginatedDiffEntity do
pagination_data: diff_batch.pagination_data
}
end
+
let(:entity) { described_class.new(diff_batch, options) }
subject { entity.as_json }
diff --git a/spec/serializers/pipeline_entity_spec.rb b/spec/serializers/pipeline_entity_spec.rb
index e638b14765b..e00f05a8fe8 100644
--- a/spec/serializers/pipeline_entity_spec.rb
+++ b/spec/serializers/pipeline_entity_spec.rb
@@ -7,7 +7,6 @@ RSpec.describe PipelineEntity do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
let(:request) { double('request') }
before do
@@ -265,24 +264,8 @@ RSpec.describe PipelineEntity do
context 'when pipeline has build report results' do
let(:pipeline) { create(:ci_pipeline, :with_report_results, project: project, user: user) }
- context 'when feature is enabled' do
- before do
- stub_feature_flags(build_report_summary: true)
- end
-
- it 'exposes tests total count' do
- expect(subject[:tests_total_count]).to eq(2)
- end
- end
-
- context 'when feature is disabled' do
- before do
- stub_feature_flags(build_report_summary: false)
- end
-
- it 'do not expose tests total count' do
- expect(subject).not_to include(:tests_total_count)
- end
+ it 'exposes tests total count' do
+ expect(subject[:tests_total_count]).to eq(2)
end
end
end
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index c1386ac4eb2..dfe51e9006f 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -160,20 +160,6 @@ RSpec.describe PipelineSerializer do
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
-
- context 'with the :build_report_summary flag turned off' do
- before do
- stub_feature_flags(build_report_summary: false)
- end
-
- it 'verifies number of queries', :request_store do
- recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 43 : 40
-
- expect(recorded.count).to be_within(2).of(expected_queries)
- expect(recorded.cached_count).to eq(0)
- end
- end
end
context 'with different refs' do
@@ -195,20 +181,6 @@ RSpec.describe PipelineSerializer do
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
-
- context 'with the :build_report_summary flag turned off' do
- before do
- stub_feature_flags(build_report_summary: false)
- end
-
- it 'verifies number of queries', :request_store do
- recorded = ActiveRecord::QueryRecorder.new { subject }
- expected_queries = Gitlab.ee? ? 46 : 43
-
- expect(recorded.count).to be_within(2).of(expected_queries)
- expect(recorded.cached_count).to eq(0)
- end
- end
end
context 'with triggered pipelines' do
@@ -231,7 +203,7 @@ RSpec.describe PipelineSerializer do
# :source_pipeline and :source_job
# Existing numbers are high and require performance optimization
# https://gitlab.com/gitlab-org/gitlab/-/issues/225156
- expected_queries = Gitlab.ee? ? 101 : 92
+ expected_queries = Gitlab.ee? ? 95 : 86
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
diff --git a/spec/serializers/prometheus_alert_entity_spec.rb b/spec/serializers/prometheus_alert_entity_spec.rb
index aeee8de2a5b..ae8c97401f8 100644
--- a/spec/serializers/prometheus_alert_entity_spec.rb
+++ b/spec/serializers/prometheus_alert_entity_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe PrometheusAlertEntity do
end
it 'exposes prometheus_alert attributes' do
- expect(subject).to include(:id, :title, :query, :operator, :threshold)
+ expect(subject).to include(:id, :title, :query, :operator, :threshold, :runbook_url)
end
it 'exposes alert_path' do
diff --git a/spec/serializers/release_serializer_spec.rb b/spec/serializers/release_serializer_spec.rb
new file mode 100644
index 00000000000..518d281f370
--- /dev/null
+++ b/spec/serializers/release_serializer_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ReleaseSerializer do
+ let(:user) { create(:user) }
+ let(:project) { create :project }
+
+ subject { described_class.new.represent(resource, current_user: user) }
+
+ before do
+ project.add_developer(user)
+ end
+
+ describe '#represent' do
+ context 'when a single object is being serialized' do
+ let(:resource) { create(:release, project: project) }
+
+ it 'serializes the release object' do
+ expect(subject[:tag]).to eq resource.tag
+ end
+ end
+
+ context 'when multiple objects are being serialized' do
+ let(:resource) { create_list(:release, 3) }
+
+ it 'serializes the array of releases' do
+ expect(subject.size).to eq(3)
+ end
+ end
+ end
+end
diff --git a/spec/serializers/suggestion_entity_spec.rb b/spec/serializers/suggestion_entity_spec.rb
index b133c3fb82e..25301bb20cc 100644
--- a/spec/serializers/suggestion_entity_spec.rb
+++ b/spec/serializers/suggestion_entity_spec.rb
@@ -36,87 +36,11 @@ RSpec.describe SuggestionEntity do
let(:can_apply_suggestion) { true }
before do
- allow(suggestion).to receive(:appliable?).and_return(appliable)
+ allow(suggestion).to receive(:inapplicable_reason).and_return("Can't apply this suggestion.")
end
- context 'and suggestion is appliable' do
- let(:appliable) { true }
-
- it 'returns nil' do
- expect(inapplicable_reason).to be_nil
- end
- end
-
- context 'but suggestion is not applicable' do
- let(:appliable) { false }
-
- before do
- allow(suggestion).to receive(:inapplicable_reason).and_return(reason)
- end
-
- context 'and merge request was merged' do
- let(:reason) { :merge_request_merged }
-
- it 'returns appropriate message' do
- expect(inapplicable_reason).to eq("This merge request was merged. To apply this suggestion, edit this file directly.")
- end
- end
-
- context 'and source branch was deleted' do
- let(:reason) { :source_branch_deleted }
-
- it 'returns appropriate message' do
- expect(inapplicable_reason).to eq("Can't apply as the source branch was deleted.")
- end
- end
-
- context 'and merge request is closed' do
- let(:reason) { :merge_request_closed }
-
- it 'returns appropriate message' do
- expect(inapplicable_reason).to eq("This merge request is closed. To apply this suggestion, edit this file directly.")
- end
- end
-
- context 'and suggestion is outdated' do
- let(:reason) { :outdated }
-
- before do
- allow(suggestion).to receive(:single_line?).and_return(single_line)
- end
-
- context 'and suggestion is for a single line' do
- let(:single_line) { true }
-
- it 'returns appropriate message' do
- expect(inapplicable_reason).to eq("Can't apply as this line was changed in a more recent version.")
- end
- end
-
- context 'and suggestion is for multiple lines' do
- let(:single_line) { false }
-
- it 'returns appropriate message' do
- expect(inapplicable_reason).to eq("Can't apply as these lines were changed in a more recent version.")
- end
- end
- end
-
- context 'and suggestion has the same content' do
- let(:reason) { :same_content }
-
- it 'returns appropriate message' do
- expect(inapplicable_reason).to eq("This suggestion already matches its content.")
- end
- end
-
- context 'and suggestion is inapplicable for other reasons' do
- let(:reason) { :some_other_reason }
-
- it 'returns default message' do
- expect(inapplicable_reason).to eq("Can't apply this suggestion.")
- end
- end
+ it 'returns the inapplicable reason' do
+ expect(inapplicable_reason).to eq(suggestion.inapplicable_reason)
end
end
diff --git a/spec/serializers/test_report_summary_entity_spec.rb b/spec/serializers/test_report_summary_entity_spec.rb
index fcac9af5c23..4b0af398b83 100644
--- a/spec/serializers/test_report_summary_entity_spec.rb
+++ b/spec/serializers/test_report_summary_entity_spec.rb
@@ -9,12 +9,8 @@ RSpec.describe TestReportSummaryEntity do
describe '#as_json' do
subject(:as_json) { entity.as_json }
- it 'contains the total time' do
- expect(as_json).to include(:total_time)
- end
-
- it 'contains the counts' do
- expect(as_json).to include(:total_count, :success_count, :failed_count, :skipped_count, :error_count)
+ it 'contains the total' do
+ expect(as_json).to include(:total)
end
context 'when summary has test suites' do
diff --git a/spec/serializers/test_suite_summary_entity_spec.rb b/spec/serializers/test_suite_summary_entity_spec.rb
index d26592bc60e..864781ccfce 100644
--- a/spec/serializers/test_suite_summary_entity_spec.rb
+++ b/spec/serializers/test_suite_summary_entity_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe TestSuiteSummaryEntity do
let(:pipeline) { create(:ci_pipeline, :with_report_results) }
- let(:entity) { described_class.new(pipeline.test_report_summary.total) }
+ let(:entity) { described_class.new(pipeline.test_report_summary.test_suites.each_value.first) }
describe '#as_json' do
subject(:as_json) { entity.as_json }
diff --git a/spec/services/admin/propagate_integration_service_spec.rb b/spec/services/admin/propagate_integration_service_spec.rb
index 843b78a41e9..2e879cf06d1 100644
--- a/spec/services/admin/propagate_integration_service_spec.rb
+++ b/spec/services/admin/propagate_integration_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Admin::PropagateIntegrationService do
describe '.propagate' do
- let(:excluded_attributes) { %w[id project_id inherit_from_id instance created_at updated_at title description] }
+ let(:excluded_attributes) { %w[id project_id inherit_from_id instance created_at updated_at default] }
let!(:project) { create(:project) }
let!(:instance_integration) do
JiraService.create!(
diff --git a/spec/services/alert_management/alerts/update_service_spec.rb b/spec/services/alert_management/alerts/update_service_spec.rb
index 91b02325bad..ee04fc55984 100644
--- a/spec/services/alert_management/alerts/update_service_spec.rb
+++ b/spec/services/alert_management/alerts/update_service_spec.rb
@@ -147,8 +147,7 @@ RSpec.describe AlertManagement::Alerts::UpdateService do
end
it_behaves_like 'does not add a system note'
- # TODO: We should not add another todo in this scenario
- it_behaves_like 'adds a todo'
+ it_behaves_like 'does not add a todo'
end
context 'with multiple users included' do
diff --git a/spec/services/alert_management/create_alert_issue_service_spec.rb b/spec/services/alert_management/create_alert_issue_service_spec.rb
index a8f2b4ee09c..cf24188a738 100644
--- a/spec/services/alert_management/create_alert_issue_service_spec.rb
+++ b/spec/services/alert_management/create_alert_issue_service_spec.rb
@@ -88,7 +88,6 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
it_behaves_like 'creating an alert issue'
it_behaves_like 'setting an issue attributes'
- it_behaves_like 'create alert issue sets issue labels'
end
context 'when the alert is generic' do
@@ -97,7 +96,6 @@ RSpec.describe AlertManagement::CreateAlertIssueService do
it_behaves_like 'creating an alert issue'
it_behaves_like 'setting an issue attributes'
- it_behaves_like 'create alert issue sets issue labels'
end
context 'when issue cannot be created' do
diff --git a/spec/services/award_emojis/copy_service_spec.rb b/spec/services/award_emojis/copy_service_spec.rb
new file mode 100644
index 00000000000..e85c548968e
--- /dev/null
+++ b/spec/services/award_emojis/copy_service_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AwardEmojis::CopyService do
+ let_it_be(:from_awardable) do
+ create(:issue, award_emoji: [
+ build(:award_emoji, name: 'thumbsup'),
+ build(:award_emoji, name: 'thumbsdown')
+ ])
+ end
+
+ describe '#initialize' do
+ it 'validates that we cannot copy AwardEmoji to the same Awardable' do
+ expect { described_class.new(from_awardable, from_awardable) }.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '#execute' do
+ let(:to_awardable) { create(:issue) }
+
+ subject(:execute_service) { described_class.new(from_awardable, to_awardable).execute }
+
+ it 'copies AwardEmojis', :aggregate_failures do
+ expect { execute_service }.to change { AwardEmoji.count }.by(2)
+ expect(to_awardable.award_emoji.map(&:name)).to match_array(%w(thumbsup thumbsdown))
+ end
+
+ it 'returns success', :aggregate_failures do
+ expect(execute_service).to be_kind_of(ServiceResponse)
+ expect(execute_service).to be_success
+ end
+ end
+end
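The CopyService spec above exercises a small, recurring service-object contract: reject invalid arguments in #initialize and report the outcome of #execute as a success object. A self-contained sketch of that contract, using hypothetical names (SimpleCopyService, Result) rather than GitLab's AwardEmojis::CopyService or ServiceResponse:

# Hypothetical illustration only; the real service copies AwardEmoji records
# and returns a ServiceResponse.
class SimpleCopyService
  Result = Struct.new(:status, keyword_init: true) do
    def success?
      status == :success
    end
  end

  def initialize(from, to)
    # Mirrors the spec's #initialize check: copying onto itself is an error.
    raise ArgumentError, 'cannot copy a collection onto itself' if from.equal?(to)

    @from = from
    @to = to
  end

  def execute
    @from.each { |item| @to << item }
    Result.new(status: :success)
  end
end

# Usage: SimpleCopyService.new(%w(thumbsup thumbsdown), []).execute.success? # => true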
diff --git a/spec/services/boards/lists/create_service_spec.rb b/spec/services/boards/lists/create_service_spec.rb
index f3d4e62eeca..88b6c3098d1 100644
--- a/spec/services/boards/lists/create_service_spec.rb
+++ b/spec/services/boards/lists/create_service_spec.rb
@@ -59,6 +59,21 @@ RSpec.describe Boards::Lists::CreateService do
expect { service.execute(board) }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ context 'when backlog param is sent' do
+ it 'creates one and only one backlog list' do
+ service = described_class.new(parent, user, 'backlog' => true)
+ list = service.execute(board)
+
+ expect(list.list_type).to eq('backlog')
+ expect(list.position).to be_nil
+ expect(list).to be_valid
+
+ another_backlog = service.execute(board)
+
+ expect(another_backlog).to eq list
+ end
+ end
end
context 'when board parent is a project' do
diff --git a/spec/services/boards/lists/list_service_spec.rb b/spec/services/boards/lists/list_service_spec.rb
index 3d71c467e96..dfe65f3d241 100644
--- a/spec/services/boards/lists/list_service_spec.rb
+++ b/spec/services/boards/lists/list_service_spec.rb
@@ -6,12 +6,14 @@ RSpec.describe Boards::Lists::ListService do
let(:user) { create(:user) }
describe '#execute' do
+ let(:service) { described_class.new(parent, user) }
+
context 'when board parent is a project' do
let(:project) { create(:project) }
let(:board) { create(:board, project: project) }
let(:label) { create(:label, project: project) }
let!(:list) { create(:list, board: board, label: label) }
- let(:service) { described_class.new(project, user) }
+ let(:parent) { project }
it_behaves_like 'lists list service'
end
@@ -21,7 +23,7 @@ RSpec.describe Boards::Lists::ListService do
let(:board) { create(:board, group: group) }
let(:label) { create(:group_label, group: group) }
let!(:list) { create(:list, board: board, label: label) }
- let(:service) { described_class.new(group, user) }
+ let(:parent) { group }
it_behaves_like 'lists list service'
end
diff --git a/spec/services/branches/create_service_spec.rb b/spec/services/branches/create_service_spec.rb
index b682a3f26ec..5cf0d5af75f 100644
--- a/spec/services/branches/create_service_spec.rb
+++ b/spec/services/branches/create_service_spec.rb
@@ -44,5 +44,25 @@ RSpec.describe Branches::CreateService do
expect(result[:message]).to eq('Invalid reference name: unknown')
end
end
+
+ it 'logs and returns an error if there is a PreReceiveError exception' do
+ error_message = 'pre receive error'
+ raw_message = "GitLab: #{error_message}"
+ pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message)
+
+ allow(project.repository).to receive(:add_branch).and_raise(pre_receive_error)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ pre_receive_error,
+ pre_receive_message: raw_message,
+ branch_name: 'new-feature',
+ ref: 'unknown'
+ )
+
+ result = service.execute('new-feature', 'unknown')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq(error_message)
+ end
end
end
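The new example above pins down a rescue-and-track pattern: a pre-receive failure is reported to the error tracker with context (message, branch name, ref) and then converted into an error result instead of propagating. A generic, self-contained sketch of that pattern with hypothetical names (the real service rescues Gitlab::Git::PreReceiveError and reports through Gitlab::ErrorTracking):

# Hypothetical rescue-and-track sketch, not GitLab's Branches::CreateService.
class TrackedBranchCreator
  def initialize(repository, tracker)
    @repository = repository # responds to #add_branch(name, ref)
    @tracker = tracker       # responds to #track_exception(error, **context)
  end

  def execute(branch_name, ref)
    branch = @repository.add_branch(branch_name, ref)
    { status: :success, branch: branch }
  rescue StandardError => error
    # Report the failure with enough context to debug it, then degrade to an
    # error hash so callers never see the raw exception.
    @tracker.track_exception(error, branch_name: branch_name, ref: ref)
    { status: :error, message: error.message }
  end
end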
diff --git a/spec/services/ci/build_report_result_service_spec.rb b/spec/services/ci/build_report_result_service_spec.rb
index 3c1ef5301fc..70bcf74ba43 100644
--- a/spec/services/ci/build_report_result_service_spec.rb
+++ b/spec/services/ci/build_report_result_service_spec.rb
@@ -19,16 +19,6 @@ RSpec.describe Ci::BuildReportResultService do
expect(Ci::BuildReportResult.count).to eq(1)
end
- context 'when feature is disable' do
- it 'does not persist the data' do
- stub_feature_flags(build_report_summary: false)
-
- subject
-
- expect(Ci::BuildReportResult.count).to eq(0)
- end
- end
-
context 'when data has already been persisted' do
it 'raises an error and do not persist the same data twice' do
expect { 2.times { described_class.new.execute(build) } }.to raise_error(ActiveRecord::RecordNotUnique)
diff --git a/spec/services/ci/change_variable_service_spec.rb b/spec/services/ci/change_variable_service_spec.rb
new file mode 100644
index 00000000000..7acdd4e834f
--- /dev/null
+++ b/spec/services/ci/change_variable_service_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ChangeVariableService do
+ let(:service) { described_class.new(container: group, current_user: user, params: params) }
+
+ let_it_be(:user) { create(:user) }
+ let(:group) { create(:group) }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ context 'when creating a variable' do
+ let(:params) { { variable_params: { key: 'new_variable', value: 'variable_value' }, action: :create } }
+
+ it 'persists a variable' do
+ expect { execute }.to change(Ci::GroupVariable, :count).from(0).to(1)
+ end
+ end
+
+ context 'when updating a variable' do
+ let!(:variable) { create(:ci_group_variable, value: 'old_value') }
+ let(:params) { { variable_params: { key: variable.key, value: 'new_value' }, action: :update } }
+
+ before do
+ group.variables << variable
+ end
+
+ it 'updates a variable' do
+ expect { execute }.to change { variable.reload.value }.from('old_value').to('new_value')
+ end
+
+ context 'when the variable does not exist' do
+ before do
+ variable.destroy!
+ end
+
+ it 'raises a record not found error' do
+ expect { execute }.to raise_error(::ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+
+ context 'when destroying a variable' do
+ let!(:variable) { create(:ci_group_variable) }
+ let(:params) { { variable_params: { key: variable.key }, action: :destroy } }
+
+ before do
+ group.variables << variable
+ end
+
+ it 'destroys a variable' do
+ expect { execute }.to change { Ci::GroupVariable.exists?(variable.id) }.from(true).to(false)
+ end
+
+ context 'when the variable does not exist' do
+ before do
+ variable.destroy!
+ end
+
+ it 'raises a record not found error' do
+ expect { execute }.to raise_error(::ActiveRecord::RecordNotFound)
+ end
+ end
+ end
+ end
+end
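The spec above describes a single service that dispatches on params[:action] (:create, :update, :destroy) and raises a not-found error when the target variable is missing. A plain-Ruby sketch of that dispatch shape, with hypothetical names and a Hash standing in for the container's CI variables:

# Hypothetical dispatch-on-action sketch; the real service works on
# Ci::GroupVariable records and raises ActiveRecord::RecordNotFound.
class VariableDispatcher
  NotFoundError = Class.new(StandardError)

  def initialize(container:, params:)
    @variables = container # key => value pairs standing in for CI variables
    @params = params
  end

  def execute
    key, value = @params[:variable_params].values_at(:key, :value)

    case @params[:action]
    when :create
      @variables[key] = value
    when :update
      ensure_exists!(key)
      @variables[key] = value
    when :destroy
      ensure_exists!(key)
      @variables.delete(key)
    else
      raise ArgumentError, "unknown action: #{@params[:action]}"
    end
  end

  private

  def ensure_exists!(key)
    raise NotFoundError, "no variable named #{key}" unless @variables.key?(key)
  end
end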
diff --git a/spec/services/ci/change_variables_service_spec.rb b/spec/services/ci/change_variables_service_spec.rb
new file mode 100644
index 00000000000..5f1207eaf58
--- /dev/null
+++ b/spec/services/ci/change_variables_service_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::ChangeVariablesService do
+ let(:service) { described_class.new(container: group, current_user: user, params: params) }
+
+ let_it_be(:user) { create(:user) }
+ let(:group) { spy(:group, variables: []) }
+ let(:params) { { variables_attributes: [{ key: 'new_variable', value: 'variable_value' }] } }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ it 'delegates to ActiveRecord update' do
+ execute
+
+ expect(group).to have_received(:update).with(params)
+ end
+ end
+end
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/create_job_artifacts_service_spec.rb
index 3f5cf079025..72b0d220b11 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/create_job_artifacts_service_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
end
- context 'when expire_in params is set' do
+ context 'when expire_in params is set to a specific value' do
before do
params.merge!('expire_in' => '2 hours')
end
@@ -89,6 +89,23 @@ RSpec.describe Ci::CreateJobArtifactsService do
expect(metadata_artifact.expire_at).to be_within(1.minute).of(expected_expire_at)
end
end
+
+ context 'when expire_in params is set to `never`' do
+ before do
+ params.merge!('expire_in' => 'never')
+ end
+
+ it 'does not set an expiration date' do
+ expected_expire_at = nil
+
+ expect(subject).to be_truthy
+ archive_artifact, metadata_artifact = job.job_artifacts.last(2)
+
+ expect(job.artifacts_expire_at).to eq(expected_expire_at)
+ expect(archive_artifact.expire_at).to eq(expected_expire_at)
+ expect(metadata_artifact.expire_at).to eq(expected_expire_at)
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
index 16205529f1c..3be5ac1f739 100644
--- a/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
+++ b/spec/services/ci/create_pipeline_service/creation_errors_and_warnings_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Ci::CreatePipelineService do
test:
script: rspec
rules:
- - if: '$CI_COMMIT_BRANCH'
+ - when: always
YAML
end
@@ -32,7 +32,7 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline.error_messages.map(&:content)).to be_empty
expect(pipeline.warning_messages.map(&:content)).to contain_exactly(
- 'jobs:test uses `rules` without defining `workflow:rules`'
+ /jobs:test may allow multiple pipelines to run/
)
end
@@ -77,13 +77,13 @@ RSpec.describe Ci::CreatePipelineService do
stage: test
script: echo
rules:
- - if: '$CI_COMMIT_BRANCH'
+ - when: on_success
YAML
end
it 'contains both errors and warnings' do
error_message = 'build job: need test is not defined in prior stages'
- warning_message = 'jobs:test uses `rules` without defining `workflow:rules`'
+ warning_message = /jobs:test may allow multiple pipelines to run/
expect(pipeline.yaml_errors).to eq(error_message)
expect(pipeline.error_messages.map(&:content)).to contain_exactly(error_message)
diff --git a/spec/services/ci/create_pipeline_service/dry_run_spec.rb b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
new file mode 100644
index 00000000000..93378df80f0
--- /dev/null
+++ b/spec/services/ci/create_pipeline_service/dry_run_spec.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CreatePipelineService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:admin) }
+ let(:ref) { 'refs/heads/master' }
+ let(:service) { described_class.new(project, user, { ref: ref }) }
+
+ subject { service.execute(:push, dry_run: true) }
+
+ before do
+ stub_ci_pipeline_yaml_file(config)
+ end
+
+ describe 'dry run' do
+ shared_examples 'returns a non persisted pipeline' do
+ it 'does not persist the pipeline' do
+ expect(subject).not_to be_persisted
+ expect(subject.id).to be_nil
+ end
+
+ it 'does not process the pipeline' do
+ expect(Ci::ProcessPipelineService).not_to receive(:new)
+
+ subject
+ end
+
+ it 'does not schedule merge request head pipeline update' do
+ expect(service).not_to receive(:schedule_head_pipeline_update)
+
+ subject
+ end
+ end
+
+ context 'when pipeline is valid' do
+ let(:config) { gitlab_ci_yaml }
+
+ it_behaves_like 'returns a non persisted pipeline'
+
+ it 'returns a valid pipeline' do
+ expect(subject.error_messages).to be_empty
+ expect(subject.yaml_errors).to be_nil
+ expect(subject.errors).to be_empty
+ end
+ end
+
+ context 'when pipeline is not valid' do
+ context 'when there are syntax errors' do
+ let(:config) do
+ <<~YAML
+ rspec:
+ script: echo
+ something: wrong
+ YAML
+ end
+
+ it_behaves_like 'returns a non persisted pipeline'
+
+ it 'returns a pipeline with errors', :aggregate_failures do
+ error_message = 'jobs:rspec config contains unknown keys: something'
+
+ expect(subject.error_messages.map(&:content)).to eq([error_message])
+ expect(subject.errors).not_to be_empty
+ expect(subject.yaml_errors).to eq(error_message)
+ end
+ end
+
+ context 'when there are logical errors' do
+ let(:config) do
+ <<~YAML
+ build:
+ script: echo
+ stage: build
+ needs: [test]
+ test:
+ script: echo
+ stage: test
+ YAML
+ end
+
+ it_behaves_like 'returns a non persisted pipeline'
+
+ it 'returns a pipeline with errors', :aggregate_failures do
+ error_message = 'build job: need test is not defined in prior stages'
+
+ expect(subject.error_messages.map(&:content)).to eq([error_message])
+ expect(subject.errors).not_to be_empty
+ end
+ end
+
+ context 'when there are errors at the seeding stage' do
+ let(:config) do
+ <<~YAML
+ build:
+ stage: build
+ script: echo
+ rules:
+ - if: '$CI_MERGE_REQUEST_ID'
+ test:
+ stage: test
+ script: echo
+ needs: ['build']
+ YAML
+ end
+
+ it_behaves_like 'returns a non persisted pipeline'
+
+ it 'returns a pipeline with errors', :aggregate_failures do
+ error_message = "test: needs 'build'"
+
+ expect(subject.error_messages.map(&:content)).to eq([error_message])
+ expect(subject.errors).not_to be_empty
+ end
+ end
+ end
+ end
+end
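The dry-run spec above only requires that the pipeline is built and validated, never persisted, and that no follow-up processing is scheduled. A sketch of that general dry-run switch with hypothetical names (the real Ci::CreatePipelineService builds the pipeline through a chain of steps rather than a single method):

# Hypothetical dry-run sketch: always validate, only persist and enqueue work
# when dry_run is false.
class RecordCreator
  Record = Struct.new(:attributes, :errors) do
    def persisted?
      @persisted == true
    end

    def mark_persisted!
      @persisted = true
    end
  end

  def initialize(store, worker)
    @store = store   # responds to #save(record)
    @worker = worker # responds to #perform_async(record)
  end

  def execute(attributes, dry_run: false)
    errors = attributes[:name].to_s.empty? ? ['name is required'] : []
    record = Record.new(attributes, errors)

    # On a dry run (or when validation fails) return the unsaved record so the
    # caller can inspect errors, much like the non-persisted pipeline above.
    return record if dry_run || errors.any?

    @store.save(record)
    @worker.perform_async(record)
    record.mark_persisted!
    record
  end
end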
diff --git a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
index 5157574ea04..f656ad52ac8 100644
--- a/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parameter_content_spec.rb
@@ -49,14 +49,5 @@ RSpec.describe Ci::CreatePipelineService do
end
end
end
-
- context 'when source is not a dangling build' do
- subject { service.execute(:web, content: content) }
-
- it 'raises an exception' do
- klass = Gitlab::Ci::Pipeline::Chain::Config::Content::Parameter::UnsupportedSourceError
- expect { subject }.to raise_error(klass)
- end
- end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 9dc518be996..db4c2f5a047 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -93,6 +93,7 @@ RSpec.describe Ci::CreatePipelineService do
let(:merge_request_1) do
create(:merge_request, source_branch: 'feature', target_branch: "master", source_project: project)
end
+
let(:merge_request_2) do
create(:merge_request, source_branch: 'feature', target_branch: "v1.1.0", source_project: project)
end
@@ -512,7 +513,7 @@ RSpec.describe Ci::CreatePipelineService do
it 'pull it from Auto-DevOps' do
pipeline = execute_service
expect(pipeline).to be_auto_devops_source
- expect(pipeline.builds.map(&:name)).to match_array(%w[build code_quality eslint-sast test])
+ expect(pipeline.builds.map(&:name)).to match_array(%w[build code_quality eslint-sast secret_detection_default_branch secrets-sast test])
end
end
@@ -905,6 +906,7 @@ RSpec.describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(YAML.dump({
rspec: { script: 'rspec', retry: retry_value }
}))
+ rspec_job.update!(options: { retry: retry_value })
end
context 'as an integer' do
@@ -912,8 +914,6 @@ RSpec.describe Ci::CreatePipelineService do
it 'correctly creates builds with auto-retry value configured' do
expect(pipeline).to be_persisted
- expect(rspec_job.options_retry_max).to eq 2
- expect(rspec_job.options_retry_when).to eq ['always']
end
end
@@ -922,8 +922,6 @@ RSpec.describe Ci::CreatePipelineService do
it 'correctly creates builds with auto-retry value configured' do
expect(pipeline).to be_persisted
- expect(rspec_job.options_retry_max).to eq 2
- expect(rspec_job.options_retry_when).to eq ['runner_system_failure']
end
end
end
@@ -985,7 +983,6 @@ RSpec.describe Ci::CreatePipelineService do
context 'with release' do
shared_examples_for 'a successful release pipeline' do
before do
- stub_feature_flags(ci_release_generation: true)
stub_ci_pipeline_yaml_file(YAML.dump(config))
end
@@ -1695,16 +1692,23 @@ RSpec.describe Ci::CreatePipelineService do
context 'when pipeline on feature is created' do
let(:ref_name) { 'refs/heads/feature' }
+ shared_examples 'has errors' do
+ it 'contains the expected errors' do
+ expect(pipeline.builds).to be_empty
+ expect(pipeline.yaml_errors).to eq("test_a: needs 'build_a'")
+ expect(pipeline.error_messages.map(&:content)).to contain_exactly("test_a: needs 'build_a'")
+ expect(pipeline.errors[:base]).to contain_exactly("test_a: needs 'build_a'")
+ end
+ end
+
context 'when save_on_errors is enabled' do
let(:pipeline) { execute_service(save_on_errors: true) }
it 'does create a pipeline as test_a depends on build_a' do
expect(pipeline).to be_persisted
- expect(pipeline.builds).to be_empty
- expect(pipeline.yaml_errors).to eq("test_a: needs 'build_a'")
- expect(pipeline.messages.pluck(:content)).to contain_exactly("test_a: needs 'build_a'")
- expect(pipeline.errors[:base]).to contain_exactly("test_a: needs 'build_a'")
end
+
+ it_behaves_like 'has errors'
end
context 'when save_on_errors is disabled' do
@@ -1712,11 +1716,9 @@ RSpec.describe Ci::CreatePipelineService do
it 'does not create a pipeline as test_a depends on build_a' do
expect(pipeline).not_to be_persisted
- expect(pipeline.builds).to be_empty
- expect(pipeline.yaml_errors).to be_nil
- expect(pipeline.messages).not_to be_empty
- expect(pipeline.errors[:base]).to contain_exactly("test_a: needs 'build_a'")
end
+
+ it_behaves_like 'has errors'
end
end
diff --git a/spec/services/ci/daily_build_group_report_result_service_spec.rb b/spec/services/ci/daily_build_group_report_result_service_spec.rb
index 7d181a5c2ba..f196afb05e8 100644
--- a/spec/services/ci/daily_build_group_report_result_service_spec.rb
+++ b/spec/services/ci/daily_build_group_report_result_service_spec.rb
@@ -65,6 +65,7 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
created_at: '2020-02-06 00:02:20'
)
end
+
let!(:new_rspec_job) { create(:ci_build, pipeline: new_pipeline, name: '4/4 rspec', coverage: 84) }
let!(:new_karma_job) { create(:ci_build, pipeline: new_pipeline, name: '3/3 karma', coverage: 92) }
@@ -104,6 +105,7 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
created_at: '2020-02-06 00:02:20'
)
end
+
let!(:new_rspec_job) { create(:ci_build, pipeline: new_pipeline, name: '4/4 rspec', coverage: 84) }
let!(:new_karma_job) { create(:ci_build, pipeline: new_pipeline, name: '3/3 karma', coverage: 92) }
@@ -149,6 +151,7 @@ RSpec.describe Ci::DailyBuildGroupReportResultService, '#execute' do
created_at: '2020-02-06 00:02:20'
)
end
+
let!(:some_job) { create(:ci_build, pipeline: new_pipeline, name: 'foo') }
it 'does nothing' do
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index a10a333b462..bc8b6b2d113 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -5,20 +5,12 @@ require_relative 'shared_processing_service.rb'
require_relative 'shared_processing_service_tests_with_yaml.rb'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService do
- before do
- stub_feature_flags(ci_atomic_processing: true)
-
- # This feature flag is implicit
- # Atomic Processing does not process statuses differently
- stub_feature_flags(ci_composite_status: true)
- end
-
it_behaves_like 'Pipeline Processing Service'
it_behaves_like 'Pipeline Processing Service Tests With Yaml'
private
- def process_pipeline(initial_process: false)
+ def process_pipeline
described_class.new(pipeline).execute
end
end
diff --git a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb b/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
deleted file mode 100644
index 569a6d62dc1..00000000000
--- a/spec/services/ci/pipeline_processing/legacy_processing_service_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_relative 'shared_processing_service.rb'
-require_relative 'shared_processing_service_tests_with_yaml.rb'
-
-RSpec.describe Ci::PipelineProcessing::LegacyProcessingService do
- before do
- stub_feature_flags(ci_atomic_processing: false)
- end
-
- context 'when ci_composite_status is enabled' do
- before do
- stub_feature_flags(ci_composite_status: true)
- end
-
- it_behaves_like 'Pipeline Processing Service'
- it_behaves_like 'Pipeline Processing Service Tests With Yaml'
- end
-
- context 'when ci_composite_status is disabled' do
- before do
- stub_feature_flags(ci_composite_status: false)
- end
-
- it_behaves_like 'Pipeline Processing Service'
- it_behaves_like 'Pipeline Processing Service Tests With Yaml'
- end
-
- private
-
- def process_pipeline(initial_process: false)
- described_class.new(pipeline).execute(initial_process: initial_process)
- end
-end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service.rb b/spec/services/ci/pipeline_processing/shared_processing_service.rb
index 224066885b6..7de22b6a4cc 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service.rb
@@ -788,8 +788,7 @@ RSpec.shared_examples 'Pipeline Processing Service' do
let!(:deploy_pages) { create_build('deploy_pages', stage: 'deploy', stage_idx: 2, scheduling_type: :dag) }
it 'runs deploy_pages without waiting prior stages' do
- # Ci::PipelineProcessing::LegacyProcessingService requires :initial_process parameter
- expect(process_pipeline(initial_process: true)).to be_truthy
+ expect(process_pipeline).to be_truthy
expect(stages).to eq(%w(pending created pending))
expect(builds.pending).to contain_exactly(linux_build, mac_build, deploy_pages)
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
index 17d254ba48e..77645298bc7 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -42,7 +42,7 @@ RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
{
pipeline: pipeline.status,
- stages: pipeline.ordered_stages.pluck(:name, :status).to_h,
+ stages: pipeline.stages.pluck(:name, :status).to_h,
jobs: pipeline.statuses.latest.pluck(:name, :status).to_h
}
end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml
index a133023b12d..a133023b12d 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test_when_always.yml
index 4c676761e5c..4c676761e5c 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_always.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_other_build_succeeds_deploy_needs_one_build_and_test_when_always.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_fails_with_allow_failure.yml
index ea7046262c3..ea7046262c3 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_allow_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_fails_with_allow_failure.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure_deploy_needs_test.yml
index 5ace621e89c..5ace621e89c 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_test_on_failure_deploy_needs_test.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_with_allow_failure_test_on_failure.yml
index cfc456387ff..cfc456387ff 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_build_allow_failure_test_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_fails_with_allow_failure_test_on_failure.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml
new file mode 100644
index 00000000000..60f803bc3d0
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_both.yml
@@ -0,0 +1,41 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [build, test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: created
+ jobs:
+ build: success
+ test: manual
+ deploy: created
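These pipeline-processing fixtures all share one shape: config holds the .gitlab-ci.yml under test, init/expect lists the pipeline, stage and job statuses right after creation, and transitions lists events with the statuses expected after each one. A hypothetical sketch of a reader for that format (the real harness is shared_processing_service_tests_with_yaml.rb, which drives actual pipeline models):

require 'yaml'

# Hypothetical reader for the fixture shape above; names are illustrative only.
class PipelineTestCaseFixture
  def initialize(path)
    @doc = YAML.safe_load(File.read(path))
  end

  # The .gitlab-ci.yml configuration the pipeline is built from.
  def config
    @doc.fetch('config')
  end

  # Expected pipeline/stage/job statuses immediately after creation.
  def initial_expectation
    @doc.fetch('init').fetch('expect')
  end

  # Yields each event, the jobs it applies to, and the statuses expected after.
  def each_transition
    @doc.fetch('transitions').each do |transition|
      yield transition.fetch('event'), transition.fetch('jobs'), transition.fetch('expect')
    end
  end
end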
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml
new file mode 100644
index 00000000000..4e4b2f22224
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeds_test_manual_allow_failure_true_deploy_needs_test.yml
@@ -0,0 +1,41 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: skipped
+ deploy: created
+ jobs:
+ build: success
+ test: manual
+ deploy: created
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure_deploy_needs_test.yml
index 19524cfd3e4..19524cfd3e4 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_on_failure_with_success.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_succeeds_test_on_failure_deploy_needs_test.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml
new file mode 100644
index 00000000000..fef28dcfbbe
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_test_manual_review_deploy.yml
@@ -0,0 +1,82 @@
+config:
+ stages: [build, test, review, deploy]
+
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+
+ release_test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 0
+
+ review:
+ stage: review
+ script: exit 0
+ needs: [test, release_test]
+
+ staging:
+ stage: deploy
+ script: exit 0
+ needs: [test, release_test]
+
+ production:
+ stage: deploy
+ script: exit 0
+ needs: [review]
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ review: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ release_test: created
+ review: created
+ staging: created
+ production: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: pending
+ review: created
+ deploy: created
+ jobs:
+ build: success
+ test: pending
+ release_test: manual
+ review: created
+ staging: created
+ production: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: success
+ review: created
+ deploy: created
+ jobs:
+ build: success
+ test: success
+ release_test: manual
+ review: created
+ staging: created
+ production: created
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml
index f324525bd56..f324525bd56 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test_when_always.yml
index 9986dbaa215..9986dbaa215 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_always.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_builds_succeed_test_on_failure_deploy_needs_one_build_and_test_when_always.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_deploy_needs_empty.yml b/spec/services/ci/pipeline_processing/test_cases/dag_deploy_needs_empty.yml
new file mode 100644
index 00000000000..1783c0acb11
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_deploy_needs_empty.yml
@@ -0,0 +1,27 @@
+config:
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: []
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ deploy: pending
+ jobs:
+ build: pending
+ test: created
+ deploy: pending
+
+transitions: []
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_fails_with_allow_failure.yml
index 8d4d9d403f1..8d4d9d403f1 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_allow_failure_true.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_fails_with_allow_failure.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml
new file mode 100644
index 00000000000..bb8723aa303
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml
@@ -0,0 +1,45 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: always
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: failed
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml
new file mode 100644
index 00000000000..3099a94befb
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml
@@ -0,0 +1,45 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: on_failure
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test: manual
+ deploy: created
+
+transitions:
+ - event: enqueue
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: created
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: drop
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: failed
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_other_test_succeeds_deploy_needs_both.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_other_test_succeeds_deploy_needs_both.yml
new file mode 100644
index 00000000000..7330a73b5a3
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_other_test_succeeds_deploy_needs_both.yml
@@ -0,0 +1,40 @@
+config:
+ test1:
+ stage: test
+ script: exit 0
+
+ test2:
+ stage: test
+ when: manual
+ allow_failure: false
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test1, test2]
+
+init:
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: created
+ jobs:
+ test1: pending
+ test2: manual
+ deploy: created
+
+transitions:
+ - event: success
+ jobs: [test1]
+ expect:
+ pipeline: manual
+ stages:
+ test: manual
+ deploy: created
+ jobs:
+ test1: success
+ test2: manual
+ deploy: created
+
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml
index 34073b92ccc..34073b92ccc 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_other_test_succeeds_deploy_needs_both.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_on_failure.yml
index 1751cbb2023..1751cbb2023 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_on_failure.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_with_allow_failure_test_on_failure.yml
index 3e081d4411b..3e081d4411b 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_build_allow_failure_test_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_with_allow_failure_test_on_failure.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_succeeds_test_manual_allow_failure_true.yml
index 362ac6e4239..2fd85b74d4d 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_build_fails_test_allow_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_succeeds_test_manual_allow_failure_true.yml
@@ -1,10 +1,11 @@
config:
build:
stage: build
- script: exit 1
+ script: exit 0
test:
stage: test
+ when: manual
allow_failure: true
script: exit 1
@@ -25,15 +26,15 @@ init:
deploy: created
transitions:
- - event: drop
+ - event: success
jobs: [build]
expect:
- pipeline: failed
+ pipeline: running
stages:
- build: failed
+ build: success
test: skipped
- deploy: skipped
+ deploy: pending
jobs:
- build: failed
- test: skipped
- deploy: skipped
+ build: success
+ test: manual
+ deploy: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_succeeds_test_on_failure.yml
index 15afe1ce8e1..15afe1ce8e1 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_test_on_failure_with_success.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_succeeds_test_on_failure.yml
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_build_test_manual_review_deploy.yml b/spec/services/ci/pipeline_processing/test_cases/stage_build_test_manual_review_deploy.yml
new file mode 100644
index 00000000000..2829765fd95
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_build_test_manual_review_deploy.yml
@@ -0,0 +1,79 @@
+config:
+ stages: [build, test, review, deploy]
+
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+
+ release_test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 0
+
+ review:
+ stage: review
+ script: exit 0
+
+ staging:
+ stage: deploy
+ script: exit 0
+
+ production:
+ stage: deploy
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ review: created
+ deploy: created
+ jobs:
+ build: pending
+ test: created
+ release_test: created
+ review: created
+ staging: created
+ production: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: pending
+ review: created
+ deploy: created
+ jobs:
+ build: success
+ test: pending
+ release_test: manual
+ review: created
+ staging: created
+ production: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: success
+ review: pending
+ deploy: created
+ jobs:
+ build: success
+ test: success
+ release_test: manual
+ review: pending
+ staging: created
+ production: created
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_always.yml
new file mode 100644
index 00000000000..9181c8adf50
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_always.yml
@@ -0,0 +1,23 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 1
+
+ deploy:
+ stage: deploy
+ when: always
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ test: skipped
+ deploy: pending
+ jobs:
+ test: manual
+ deploy: pending
+
+transitions: []
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index fa46d6c4d1d..212c8f99865 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -364,7 +364,7 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
stage: "stage_#{stage_num}",
stage_idx: stage_num,
pipeline: pipeline, **opts) do |build|
- pipeline.update_legacy_status
+ ::Ci::ProcessPipelineService.new(pipeline).execute
end
end
end
diff --git a/spec/services/clusters/aws/authorize_role_service_spec.rb b/spec/services/clusters/aws/authorize_role_service_spec.rb
index 530268340b7..3d12400a47b 100644
--- a/spec/services/clusters/aws/authorize_role_service_spec.rb
+++ b/spec/services/clusters/aws/authorize_role_service_spec.rb
@@ -53,6 +53,12 @@ RSpec.describe Clusters::Aws::AuthorizeRoleService do
expect(subject.status).to eq(:unprocessable_entity)
expect(subject.body).to eq({})
end
+
+ it 'logs the error' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+
+ subject
+ end
end
context 'cannot create role' do
diff --git a/spec/services/clusters/cleanup/project_namespace_service_spec.rb b/spec/services/clusters/cleanup/project_namespace_service_spec.rb
index 761ad8dd8c8..605aaea17e4 100644
--- a/spec/services/clusters/cleanup/project_namespace_service_spec.rb
+++ b/spec/services/clusters/cleanup/project_namespace_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Clusters::Cleanup::ProjectNamespaceService do
execution_count: 0
}
end
+
let(:kubeclient_instance_double) do
instance_double(Gitlab::Kubernetes::KubeClient, delete_namespace: nil, delete_service_account: nil)
end
diff --git a/spec/services/clusters/cleanup/service_account_service_spec.rb b/spec/services/clusters/cleanup/service_account_service_spec.rb
index 6fe3d0c286e..f256df1b2fc 100644
--- a/spec/services/clusters/cleanup/service_account_service_spec.rb
+++ b/spec/services/clusters/cleanup/service_account_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Clusters::Cleanup::ServiceAccountService do
execution_count: 0
}
end
+
let(:kubeclient_instance_double) do
instance_double(Gitlab::Kubernetes::KubeClient, delete_namespace: nil, delete_service_account: nil)
end
diff --git a/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb b/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
index 3b155d95345..1f6ad218927 100644
--- a/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
+++ b/spec/services/clusters/parse_cluster_applications_artifact_service_spec.rb
@@ -36,94 +36,89 @@ RSpec.describe Clusters::ParseClusterApplicationsArtifactService do
let(:job) { deployment.deployable }
let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job) }
- context 'when cluster_applications_artifact feature flag is disabled' do
- before do
- stub_feature_flags(cluster_applications_artifact: false)
- end
-
- it 'does not call Gitlab::Kubernetes::Helm::Parsers::ListV2 and returns success immediately' do
- expect(Gitlab::Kubernetes::Helm::Parsers::ListV2).not_to receive(:new)
+ it 'calls Gitlab::Kubernetes::Helm::Parsers::ListV2' do
+ expect(Gitlab::Kubernetes::Helm::Parsers::ListV2).to receive(:new).and_call_original
- result = described_class.new(job, user).execute(artifact)
+ result = described_class.new(job, user).execute(artifact)
- expect(result[:status]).to eq(:success)
- end
+ expect(result[:status]).to eq(:success)
end
- context 'when cluster_applications_artifact feature flag is enabled for project' do
- before do
- stub_feature_flags(cluster_applications_artifact: job.project)
+ context 'artifact is not of cluster_applications type' do
+ let(:artifact) { create(:ci_job_artifact, :archive) }
+ let(:job) { artifact.job }
+
+ it 'raise ArgumentError' do
+ expect do
+ described_class.new(job, user).execute(artifact)
+ end.to raise_error(ArgumentError, 'Artifact is not cluster_applications file type')
end
+ end
- it 'calls Gitlab::Kubernetes::Helm::Parsers::ListV2' do
- expect(Gitlab::Kubernetes::Helm::Parsers::ListV2).to receive(:new).and_call_original
+ context 'artifact exceeds acceptable size' do
+ it 'returns an error' do
+ stub_const("#{described_class}::MAX_ACCEPTABLE_ARTIFACT_SIZE", 1.byte)
result = described_class.new(job, user).execute(artifact)
- expect(result[:status]).to eq(:success)
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Cluster_applications artifact too big. Maximum allowable size: 1 Byte')
end
+ end
- context 'artifact is not of cluster_applications type' do
- let(:artifact) { create(:ci_job_artifact, :archive) }
- let(:job) { artifact.job }
+ context 'job has no deployment' do
+ let(:job) { build(:ci_build) }
- it 'raise ArgumentError' do
- expect do
- described_class.new(job, user).execute(artifact)
- end.to raise_error(ArgumentError, 'Artifact is not cluster_applications file type')
- end
+ it 'returns an error' do
+ result = described_class.new(job, user).execute(artifact)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No deployment found for this job')
end
+ end
- context 'artifact exceeds acceptable size' do
- it 'returns an error' do
- stub_const("#{described_class}::MAX_ACCEPTABLE_ARTIFACT_SIZE", 1.byte)
+ context 'job has no deployment cluster' do
+ let(:deployment) { create(:deployment) }
+ let(:job) { deployment.deployable }
- result = described_class.new(job, user).execute(artifact)
+ it 'returns an error' do
+ result = described_class.new(job, user).execute(artifact)
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Cluster_applications artifact too big. Maximum allowable size: 1 Byte')
- end
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No deployment cluster found for this job')
end
+ end
- context 'job has no deployment' do
- let(:job) { build(:ci_build) }
+ context 'blob is empty' do
+ let(:file) { fixture_file_upload(Rails.root.join("spec/fixtures/helm/helm_list_v2_empty_blob.json.gz")) }
+ let(:artifact) { create(:ci_job_artifact, :cluster_applications, job: job, file: file) }
- it 'returns an error' do
- result = described_class.new(job, user).execute(artifact)
+ it 'returns success' do
+ result = described_class.new(job, user).execute(artifact)
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No deployment found for this job')
- end
+ expect(result[:status]).to eq(:success)
end
+ end
- context 'job has no deployment cluster' do
- let(:deployment) { create(:deployment) }
- let(:job) { deployment.deployable }
+ context 'job has deployment cluster' do
+ context 'current user does not have access to deployment cluster' do
+ let(:other_user) { create(:user) }
it 'returns an error' do
- result = described_class.new(job, user).execute(artifact)
+ result = described_class.new(job, other_user).execute(artifact)
expect(result[:status]).to eq(:error)
expect(result[:message]).to eq('No deployment cluster found for this job')
end
end
- context 'job has deployment cluster' do
- context 'current user does not have access to deployment cluster' do
- let(:other_user) { create(:user) }
-
- it 'returns an error' do
- result = described_class.new(job, other_user).execute(artifact)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('No deployment cluster found for this job')
- end
- end
+ it 'does not affect unpermitted cluster applications' do
+ expect(Clusters::ParseClusterApplicationsArtifactService::RELEASE_NAMES).to contain_exactly('cilium')
+ end
- Clusters::ParseClusterApplicationsArtifactService::RELEASE_NAMES.each do |release_name|
- context release_name do
- include_examples 'parse cluster applications artifact', release_name
- end
+ Clusters::ParseClusterApplicationsArtifactService::RELEASE_NAMES.each do |release_name|
+ context release_name do
+ include_examples 'parse cluster applications artifact', release_name
end
end
end
diff --git a/spec/services/commits/commit_patch_service_spec.rb b/spec/services/commits/commit_patch_service_spec.rb
index c8c0cbe23b2..55cbd0e5d66 100644
--- a/spec/services/commits/commit_patch_service_spec.rb
+++ b/spec/services/commits/commit_patch_service_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Commits::CommitPatchService do
[content_1, content_2]
end
+
let(:user) { project.creator }
let(:branch_name) { 'branch-with-patches' }
let(:project) { create(:project, :repository) }
diff --git a/spec/services/design_management/move_designs_service_spec.rb b/spec/services/design_management/move_designs_service_spec.rb
new file mode 100644
index 00000000000..a05518dc28d
--- /dev/null
+++ b/spec/services/design_management/move_designs_service_spec.rb
@@ -0,0 +1,147 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe DesignManagement::MoveDesignsService do
+ include DesignManagementTestHelpers
+
+ let_it_be(:issue) { create(:issue) }
+ let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
+ let_it_be(:designs) { create_list(:design, 3, :with_relative_position, issue: issue) }
+
+ let(:project) { issue.project }
+
+ let(:service) { described_class.new(current_user, params) }
+
+ let(:params) do
+ {
+ current_design: current_design,
+ previous_design: previous_design,
+ next_design: next_design
+ }
+ end
+
+ let(:current_user) { developer }
+ let(:current_design) { nil }
+ let(:previous_design) { nil }
+ let(:next_design) { nil }
+
+ before do
+ enable_design_management
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'the feature is unavailable' do
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+
+ before do
+ stub_feature_flags(reorder_designs: false)
+ end
+
+ it 'raises cannot_move' do
+ expect(subject).to be_error.and(have_attributes(message: :cannot_move))
+ end
+
+ context 'but it is available on the current project' do
+ before do
+ stub_feature_flags(reorder_designs: issue.project)
+ end
+
+ it 'is successful' do
+ expect(subject).to be_success
+ end
+ end
+ end
+
+ context 'the user cannot move designs' do
+ let(:current_design) { designs.first }
+ let(:current_user) { build_stubbed(:user) }
+
+ it 'raises cannot_move' do
+ expect(subject).to be_error.and(have_attributes(message: :cannot_move))
+ end
+ end
+
+ context 'the designs are not distinct' do
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.first }
+
+ it 'raises not_distinct' do
+ expect(subject).to be_error.and(have_attributes(message: :not_distinct))
+ end
+ end
+
+ context 'the designs are not on the same issue' do
+ let(:current_design) { designs.first }
+ let(:previous_design) { create(:design) }
+
+ it 'raises not_same_issue' do
+ expect(subject).to be_error.and(have_attributes(message: :not_same_issue))
+ end
+ end
+
+ context 'no focus is passed' do
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+
+ it 'raises no_focus' do
+ expect(subject).to be_error.and(have_attributes(message: :no_focus))
+ end
+ end
+
+ context 'no neighbours are passed' do
+ let(:current_design) { designs.first }
+
+ it 'raises no_neighbors' do
+ expect(subject).to be_error.and(have_attributes(message: :no_neighbors))
+ end
+ end
+
+ context 'the designs are not adjacent' do
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+
+ it 'raises not_adjacent' do
+ create(:design, issue: issue, relative_position: next_design.relative_position - 1)
+
+ expect(subject).to be_error.and(have_attributes(message: :not_adjacent))
+ end
+ end
+
+ context 'moving a design with neighbours' do
+ let(:current_design) { designs.first }
+ let(:previous_design) { designs.second }
+ let(:next_design) { designs.third }
+
+ it 'repositions existing designs and correctly places the given design' do
+ other_design1 = create(:design, issue: issue, relative_position: 10)
+ other_design2 = create(:design, issue: issue, relative_position: 20)
+ other_design3, other_design4 = create_list(:design, 2, issue: issue)
+
+ expect(subject).to be_success
+
+ expect(issue.designs.ordered(issue.project)).to eq([
+ # Existing designs which already had a relative_position set.
+ # These should stay at the beginning, in the same order.
+ other_design1,
+ other_design2,
+
+ # The designs we're passing into the service.
+ # These should be placed between the existing designs, in the correct order.
+ previous_design,
+ current_design,
+ next_design,
+
+ # Existing designs which didn't have a relative_position set.
+ # These should be placed at the end, in the order of their IDs.
+ other_design3,
+ other_design4
+ ])
+ end
+ end
+ end
+end
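The reordering examples above rely on designs carrying a relative_position and on the moved design being slotted strictly between its two neighbours. A tiny sketch of the usual gap-based "position between neighbours" calculation (hypothetical helper; GitLab's models use a shared relative-positioning concern instead):

# Hypothetical gap-based positioning helper, not GitLab's implementation.
module Positioning
  STEP = 500

  # Returns a position strictly between two neighbours, or offset from the one
  # neighbour that is present.
  def self.between(previous_position, next_position)
    return next_position - STEP if previous_position.nil?
    return previous_position + STEP if next_position.nil?

    raise ArgumentError, 'no gap left between neighbours' if next_position - previous_position < 2

    (previous_position + next_position) / 2
  end
end

# Usage:
#   Positioning.between(10, 20)  # => 15
#   Positioning.between(nil, 10) # => -490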
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index 24639632566..abba5de2c27 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -5,9 +5,9 @@ RSpec.describe DesignManagement::SaveDesignsService do
include DesignManagementTestHelpers
include ConcurrentHelpers
- let_it_be(:developer) { create(:user) }
+ let_it_be_with_reload(:issue) { create(:issue) }
+ let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
let(:project) { issue.project }
- let(:issue) { create(:issue) }
let(:user) { developer }
let(:files) { [rails_sample] }
let(:design_repository) { ::Gitlab::GlRepository::DESIGN.repository_resolver.call(project) }
@@ -19,8 +19,20 @@ RSpec.describe DesignManagement::SaveDesignsService do
fixture_file_upload("spec/fixtures/#{filename}")
end
+ def commit_count
+ design_repository.expire_statistics_caches
+ design_repository.expire_root_ref_cache
+ design_repository.commit_count
+ end
+
before do
- project.add_developer(developer)
+ if issue.design_collection.repository.exists?
+ issue.design_collection.repository.expire_all_method_caches
+ issue.design_collection.repository.raw.delete_all_refs_except([Gitlab::Git::BLANK_SHA])
+ end
+
+ allow(::DesignManagement::NewVersionWorker)
+ .to receive(:perform_async).with(Integer).and_return(nil)
end
def run_service(files_to_upload = nil)
@@ -83,24 +95,20 @@ RSpec.describe DesignManagement::SaveDesignsService do
design_repository.exists?
end
- it 'creates a design repository when it did not exist' do
- expect { run_service }.to change { repository_exists }.from(false).to(true)
+ it 'is ensured when the service runs' do
+ run_service
+
+ expect(repository_exists).to be true
end
end
- it 'updates the creation count' do
+ it 'creates a commit, an event in the activity stream and updates the creation count' do
counter = Gitlab::UsageDataCounters::DesignsCounter
- expect { run_service }.to change { counter.read(:create) }.by(1)
- end
- it 'creates an event in the activity stream' do
expect { run_service }
.to change { Event.count }.by(1)
.and change { Event.for_design.created_action.count }.by(1)
- end
-
- it 'creates a commit in the repository' do
- run_service
+ .and change { counter.read(:create) }.by(1)
expect(design_repository.commit).to have_attributes(
author: user,
@@ -109,35 +117,26 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
it 'can run the same command in parallel' do
- blocks = Array.new(10).map do
- unique_files = %w(rails_sample.jpg dk.png)
- .map { |name| RenameableUpload.unique_file(name) }
+ parallelism = 4
+
+ blocks = Array.new(parallelism).map do
+ unique_files = [RenameableUpload.unique_file('rails_sample.jpg')]
-> { run_service(unique_files) }
end
- expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(10)
+ expect { run_parallel(blocks) }.to change(DesignManagement::Version, :count).by(parallelism)
end
- it 'causes diff_refs not to be nil' do
- expect(response).to include(
- designs: all(have_attributes(diff_refs: be_present))
- )
- end
-
- it 'creates a design & a version for the filename if it did not exist' do
- expect(issue.designs.size).to eq(0)
-
- updated_designs = response[:designs]
-
- expect(updated_designs.size).to eq(1)
- expect(updated_designs.first.versions.size).to eq(1)
- end
-
- it 'saves the user as the author' do
- updated_designs = response[:designs]
+ describe 'the response' do
+ it 'includes designs with the expected properties' do
+ updated_designs = response[:designs]
- expect(updated_designs.first.versions.first.author).to eq(user)
+ expect(updated_designs).to all(have_attributes(diff_refs: be_present))
+ expect(updated_designs.size).to eq(1)
+ expect(updated_designs.first.versions.size).to eq(1)
+ expect(updated_designs.first.versions.first.author).to eq(user)
+ end
end
describe 'saving the file to LFS' do
@@ -147,14 +146,10 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
end
- it 'saves the design to LFS' do
- expect { run_service }.to change { LfsObject.count }.by(1)
- end
-
- it 'saves the repository_type of the LfsObjectsProject as design' do
- expect do
- run_service
- end.to change { project.lfs_objects_projects.count }.from(0).to(1)
+ it 'saves the design to LFS and saves the repository_type of the LfsObjectsProject as design' do
+ expect { run_service }
+ .to change { LfsObject.count }.by(1)
+ .and change { project.lfs_objects_projects.count }.from(0).to(1)
expect(project.lfs_objects_projects.first.repository_type).to eq('design')
end
@@ -202,12 +197,10 @@ RSpec.describe DesignManagement::SaveDesignsService do
run_service
end
- it 'does not create a new version' do
- expect { run_service }.not_to change { issue.design_versions.count }
- end
+ it 'does not create a new version, and returns the design in `skipped_designs`' do
+ response = nil
- it 'returns the design in `skipped_designs` instead of `designs`' do
- response = run_service
+ expect { response = run_service }.not_to change { issue.design_versions.count }
expect(response[:designs]).to be_empty
expect(response[:skipped_designs].size).to eq(1)
@@ -223,35 +216,20 @@ RSpec.describe DesignManagement::SaveDesignsService do
touch_files([files.first])
end
- it 'counts one creation and one update' do
+ it 'has the correct side-effects' do
counter = Gitlab::UsageDataCounters::DesignsCounter
- expect { run_service }
- .to change { counter.read(:create) }.by(1)
- .and change { counter.read(:update) }.by(1)
- end
- it 'creates the correct activity stream events' do
+ expect(::DesignManagement::NewVersionWorker)
+ .to receive(:perform_async).once.with(Integer).and_return(nil)
+
expect { run_service }
.to change { Event.count }.by(2)
.and change { Event.for_design.count }.by(2)
.and change { Event.created_action.count }.by(1)
.and change { Event.updated_action.count }.by(1)
- end
-
- it 'creates a single commit' do
- commit_count = -> do
- design_repository.expire_all_method_caches
- design_repository.commit_count
- end
-
- expect { run_service }.to change { commit_count.call }.by(1)
- end
-
- it 'enqueues just one new version worker' do
- expect(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).once.with(Integer)
-
- run_service
+ .and change { counter.read(:create) }.by(1)
+ .and change { counter.read(:update) }.by(1)
+ .and change { commit_count }.by(1)
end
end
@@ -262,45 +240,28 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(response).to include(designs: have_attributes(size: 2), status: :success)
end
- it 'creates 2 designs with a single version' do
- expect { run_service }.to change { issue.designs.count }.from(0).to(2)
-
- expect(DesignManagement::Version.for_designs(issue.designs).size).to eq(1)
- end
-
- it 'increments the creation count by 2' do
+ it 'has the correct side-effects', :request_store do
counter = Gitlab::UsageDataCounters::DesignsCounter
- expect { run_service }.to change { counter.read(:create) }.by 2
- end
-
- it 'enqueues a new version worker' do
- expect(::DesignManagement::NewVersionWorker)
- .to receive(:perform_async).once.with(Integer)
-
- run_service
- end
-
- it 'creates a single commit' do
- commit_count = -> do
- design_repository.expire_all_method_caches
- design_repository.commit_count
- end
-
- expect { run_service }.to change { commit_count.call }.by(1)
- end
-
- it 'only does 5 gitaly calls', :request_store, :sidekiq_might_not_need_inline do
- allow(::DesignManagement::NewVersionWorker).to receive(:perform_async).with(Integer)
service = described_class.new(project, user, issue: issue, files: files)
+
# Some unrelated calls that are usually cached or happen only once
- service.__send__(:repository).create_if_not_exists
- service.__send__(:repository).has_visible_content?
+ # We expect:
+ # - An exists?
+ # - a check for existing blobs
+ # - default branch
+ # - an after_commit callback on LfsObjectsProject
+ design_repository.create_if_not_exists
+ design_repository.has_visible_content?
- request_count = -> { Gitlab::GitalyClient.get_request_count }
+ expect(::DesignManagement::NewVersionWorker)
+ .to receive(:perform_async).once.with(Integer).and_return(nil)
- # An exists?, a check for existing blobs, default branch, an after_commit
- # callback on LfsObjectsProject
- expect { service.execute }.to change(&request_count).by(4)
+ expect { service.execute }
+ .to change { issue.designs.count }.from(0).to(2)
+ .and change { DesignManagement::Version.count }.by(1)
+ .and change { counter.read(:create) }.by(2)
+ .and change { Gitlab::GitalyClient.get_request_count }.by(3)
+ .and change { commit_count }.by(1)
end
context 'when uploading too many files' do
@@ -313,7 +274,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
context 'when the user is not allowed to upload designs' do
- let(:user) { create(:user) }
+ let(:user) { build_stubbed(:user) }
it_behaves_like 'a service error'
end
diff --git a/spec/services/discussions/capture_diff_note_positions_service_spec.rb b/spec/services/discussions/capture_diff_note_positions_service_spec.rb
index dede5a4c354..be53b02a4c1 100644
--- a/spec/services/discussions/capture_diff_note_positions_service_spec.rb
+++ b/spec/services/discussions/capture_diff_note_positions_service_spec.rb
@@ -12,24 +12,24 @@ RSpec.describe Discussions::CaptureDiffNotePositionsService do
let(:offset) { 30 }
let(:first_new_line) { 508 }
let(:second_new_line) { 521 }
+ let(:third_removed_line) { 1240 }
let(:service) { described_class.new(merge_request) }
- def build_position(new_line, diff_refs)
+ def build_position(diff_refs, new_line: nil, old_line: nil)
path = 'files/markdown/ruby-style-guide.md'
Gitlab::Diff::Position.new(old_path: path, new_path: path,
- new_line: new_line, diff_refs: diff_refs)
+ new_line: new_line, old_line: old_line, diff_refs: diff_refs)
end
- def note_for(new_line)
- position = build_position(new_line, merge_request.diff_refs)
+ def note_for(new_line: nil, old_line: nil)
+ position = build_position(merge_request.diff_refs, new_line: new_line, old_line: old_line)
create(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request)
end
- def verify_diff_note_position!(note, line)
- id, old_line, new_line = note.line_code.split('_')
+ def verify_diff_note_position!(note, new_line: nil, old_line: nil)
+ id, removed_line, added_line = note.line_code.split('_')
- expect(new_line).to eq(line.to_s)
expect(note.diff_note_positions.size).to eq(1)
diff_position = note.diff_note_positions.last
@@ -38,12 +38,13 @@ RSpec.describe Discussions::CaptureDiffNotePositionsService do
start_sha: merge_request.target_branch_sha,
head_sha: merge_request.merge_ref_head.sha)
- expect(diff_position.line_code).to eq("#{id}_#{old_line.to_i - offset}_#{new_line}")
- expect(diff_position.position).to eq(build_position(new_line.to_i, diff_refs))
+ expect(diff_position.line_code).to eq("#{id}_#{removed_line.to_i - offset}_#{added_line}")
+ expect(diff_position.position).to eq(build_position(diff_refs, new_line: new_line, old_line: old_line))
end
- let!(:first_discussion_note) { note_for(first_new_line) }
- let!(:second_discussion_note) { note_for(second_new_line) }
+ let!(:first_discussion_note) { note_for(new_line: first_new_line) }
+ let!(:second_discussion_note) { note_for(new_line: second_new_line) }
+ let!(:third_discussion_note) { note_for(old_line: third_removed_line) }
let!(:second_discussion_another_note) do
create(:diff_note_on_merge_request,
project: project,
@@ -57,8 +58,9 @@ RSpec.describe Discussions::CaptureDiffNotePositionsService do
MergeRequests::MergeToRefService.new(project, merge_request.author).execute(merge_request)
service.execute
- verify_diff_note_position!(first_discussion_note, first_new_line)
- verify_diff_note_position!(second_discussion_note, second_new_line)
+ verify_diff_note_position!(first_discussion_note, new_line: first_new_line)
+ verify_diff_note_position!(second_discussion_note, new_line: second_new_line)
+ verify_diff_note_position!(third_discussion_note, old_line: third_removed_line - offset)
expect(second_discussion_another_note.diff_note_positions).to be_empty
end
diff --git a/spec/services/event_create_service_spec.rb b/spec/services/event_create_service_spec.rb
index d10ed7d6640..a91519a710f 100644
--- a/spec/services/event_create_service_spec.rb
+++ b/spec/services/event_create_service_spec.rb
@@ -171,45 +171,52 @@ RSpec.describe EventCreateService do
let_it_be(:wiki_page) { create(:wiki_page) }
let_it_be(:meta) { create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
- Event::WIKI_ACTIONS.each do |action|
- context "The action is #{action}" do
- let(:event) { service.wiki_event(meta, user, action) }
-
- it 'creates the event', :aggregate_failures do
- expect(event).to have_attributes(
- wiki_page?: true,
- valid?: true,
- persisted?: true,
- action: action.to_s,
- wiki_page: wiki_page,
- author: user
- )
- end
+ let(:fingerprint) { generate(:sha) }
- it 'records the event in the event counter' do
- stub_feature_flags(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG => true)
- counter_class = Gitlab::UsageDataCounters::TrackUniqueActions
- tracking_params = { event_action: counter_class::WIKI_ACTION, date_from: Date.yesterday, date_to: Date.today }
+ def create_event
+ service.wiki_event(meta, user, action, fingerprint)
+ end
- expect { event }
- .to change { counter_class.count_unique_events(tracking_params) }
- .from(0).to(1)
- end
+ where(:action) { Event::WIKI_ACTIONS.map { |action| [action] } }
+
+ with_them do
+ it 'creates the event' do
+ expect(create_event).to have_attributes(
+ wiki_page?: true,
+ valid?: true,
+ persisted?: true,
+ action: action.to_s,
+ wiki_page: wiki_page,
+ author: user,
+ fingerprint: fingerprint
+ )
+ end
- it 'is idempotent', :aggregate_failures do
- expect { event }.to change(Event, :count).by(1)
- duplicate = nil
- expect { duplicate = service.wiki_event(meta, user, action) }.not_to change(Event, :count)
+ it 'is idempotent', :aggregate_failures do
+ event = nil
+ expect { event = create_event }.to change(Event, :count).by(1)
+ duplicate = nil
+ expect { duplicate = create_event }.not_to change(Event, :count)
- expect(duplicate).to eq(event)
- end
+ expect(duplicate).to eq(event)
+ end
+
+ it 'records the event in the event counter' do
+ counter_class = Gitlab::UsageDataCounters::TrackUniqueActions
+ tracking_params = { event_action: counter_class::WIKI_ACTION, date_from: Date.yesterday, date_to: Date.today }
+
+ expect { create_event }
+ .to change { counter_class.count_unique(tracking_params) }
+ .by(1)
end
end
(Event.actions.keys - Event::WIKI_ACTIONS).each do |bad_action|
context "The action is #{bad_action}" do
+ let(:action) { bad_action }
+
it 'raises an error' do
- expect { service.wiki_event(meta, user, bad_action) }.to raise_error(described_class::IllegalActionError)
+ expect { create_event }.to raise_error(described_class::IllegalActionError)
end
end
end
@@ -236,12 +243,11 @@ RSpec.describe EventCreateService do
it_behaves_like 'service for creating a push event', PushEventPayloadService
it 'records the event in the event counter' do
- stub_feature_flags(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG => true)
counter_class = Gitlab::UsageDataCounters::TrackUniqueActions
tracking_params = { event_action: counter_class::PUSH_ACTION, date_from: Date.yesterday, date_to: Date.today }
expect { subject }
- .to change { counter_class.count_unique_events(tracking_params) }
+ .to change { counter_class.count_unique(tracking_params) }
.from(0).to(1)
end
end
@@ -260,12 +266,11 @@ RSpec.describe EventCreateService do
it_behaves_like 'service for creating a push event', BulkPushEventPayloadService
it 'records the event in the event counter' do
- stub_feature_flags(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG => true)
counter_class = Gitlab::UsageDataCounters::TrackUniqueActions
tracking_params = { event_action: counter_class::PUSH_ACTION, date_from: Date.yesterday, date_to: Date.today }
expect { subject }
- .to change { counter_class.count_unique_events(tracking_params) }
+ .to change { counter_class.count_unique(tracking_params) }
.from(0).to(1)
end
end
@@ -315,12 +320,11 @@ RSpec.describe EventCreateService do
end
it 'records the event in the event counter' do
- stub_feature_flags(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG => true)
counter_class = Gitlab::UsageDataCounters::TrackUniqueActions
tracking_params = { event_action: counter_class::DESIGN_ACTION, date_from: Date.yesterday, date_to: Date.today }
expect { result }
- .to change { counter_class.count_unique_events(tracking_params) }
+ .to change { counter_class.count_unique(tracking_params) }
.from(0).to(1)
end
end
@@ -343,12 +347,11 @@ RSpec.describe EventCreateService do
end
it 'records the event in the event counter' do
- stub_feature_flags(Gitlab::UsageDataCounters::TrackUniqueActions::FEATURE_FLAG => true)
counter_class = Gitlab::UsageDataCounters::TrackUniqueActions
tracking_params = { event_action: counter_class::DESIGN_ACTION, date_from: Date.yesterday, date_to: Date.today }
expect { result }
- .to change { counter_class.count_unique_events(tracking_params) }
+ .to change { counter_class.count_unique(tracking_params) }
.from(0).to(1)
end
end
diff --git a/spec/services/git/base_hooks_service_spec.rb b/spec/services/git/base_hooks_service_spec.rb
index 661c77b56bb..4ab27c7ab05 100644
--- a/spec/services/git/base_hooks_service_spec.rb
+++ b/spec/services/git/base_hooks_service_spec.rb
@@ -171,6 +171,7 @@ RSpec.describe Git::BaseHooksService do
}
}
end
+
let(:variables_attributes) { [] }
before do
@@ -192,6 +193,7 @@ RSpec.describe Git::BaseHooksService do
}
}
end
+
let(:variables_attributes) do
[
{ "key" => "FOO", "variable_type" => "env_var", "secret_value" => "123" },
@@ -219,6 +221,7 @@ RSpec.describe Git::BaseHooksService do
}
}
end
+
let(:variables_attributes) do
[
{ "key" => "FOO", "variable_type" => "env_var", "secret_value" => "123" }
diff --git a/spec/services/git/process_ref_changes_service_spec.rb b/spec/services/git/process_ref_changes_service_spec.rb
index c2fb40a0ed0..fc313bf6eb9 100644
--- a/spec/services/git/process_ref_changes_service_spec.rb
+++ b/spec/services/git/process_ref_changes_service_spec.rb
@@ -167,6 +167,7 @@ RSpec.describe Git::ProcessRefChangesService do
{ index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "refs/tags/v10.0.0" }
]
end
+
let(:branch_changes) do
[
{ index: 0, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789012', ref: "#{ref_prefix}/create1" },
@@ -174,6 +175,7 @@ RSpec.describe Git::ProcessRefChangesService do
{ index: 2, oldrev: Gitlab::Git::BLANK_SHA, newrev: '789014', ref: "#{ref_prefix}/create3" }
]
end
+
let(:git_changes) { double(branch_changes: branch_changes, tag_changes: tag_changes) }
it 'schedules job for existing merge requests' do
@@ -190,18 +192,6 @@ RSpec.describe Git::ProcessRefChangesService do
subject.execute
end
-
- context 'refresh_only_existing_merge_requests_on_push disabled' do
- before do
- stub_feature_flags(refresh_only_existing_merge_requests_on_push: false)
- end
-
- it 'refreshes all merge requests' do
- expect(UpdateMergeRequestsWorker).to receive(:perform_async).exactly(3).times
-
- subject.execute
- end
- end
end
end
diff --git a/spec/services/git/wiki_push_service_spec.rb b/spec/services/git/wiki_push_service_spec.rb
index f338b7a5709..7f709be8593 100644
--- a/spec/services/git/wiki_push_service_spec.rb
+++ b/spec/services/git/wiki_push_service_spec.rb
@@ -218,7 +218,7 @@ RSpec.describe Git::WikiPushService, services: true do
message = 'something went very very wrong'
allow_next_instance_of(WikiPages::EventCreateService, current_user) do |service|
allow(service).to receive(:execute)
- .with(String, WikiPage, Symbol)
+ .with(String, WikiPage, Symbol, String)
.and_return(ServiceResponse.error(message: message))
end
diff --git a/spec/services/groups/group_links/create_service_spec.rb b/spec/services/groups/group_links/create_service_spec.rb
index bca03863d1e..fb88433d8f6 100644
--- a/spec/services/groups/group_links/create_service_spec.rb
+++ b/spec/services/groups/group_links/create_service_spec.rb
@@ -25,6 +25,7 @@ RSpec.describe Groups::GroupLinks::CreateService, '#execute' do
expires_at: nil
}
end
+
let(:user) { group_user }
subject { described_class.new(group, user, opts) }
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index c87fc7d941e..89e4d091ff7 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -8,6 +8,75 @@ RSpec.describe Groups::TransferService do
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let(:transfer_service) { described_class.new(group, user) }
+ context 'handling packages' do
+ let_it_be(:group) { create(:group, :public) }
+ let(:project) { create(:project, :public, namespace: group) }
+ let(:new_group) { create(:group, :public) }
+
+ before do
+ group.add_owner(user)
+ new_group&.add_owner(user)
+ end
+
+ context 'with an npm package' do
+ before do
+ create(:npm_package, project: project)
+ end
+
+ shared_examples 'transfer not allowed' do
+ it 'does not allow transfer when there is a root namespace change' do
+ transfer_service.execute(new_group)
+
+ expect(transfer_service.error).to eq('Transfer failed: Group contains projects with NPM packages.')
+ expect(group.parent).not_to eq(new_group)
+ end
+ end
+
+ it_behaves_like 'transfer not allowed'
+
+ context 'with a project within subgroup' do
+ let(:root_group) { create(:group) }
+ let(:group) { create(:group, parent: root_group) }
+
+ before do
+ root_group.add_owner(user)
+ end
+
+ it_behaves_like 'transfer not allowed'
+
+ context 'without a root namespace change' do
+ let(:new_group) { create(:group, parent: root_group) }
+
+ it 'allows transfer' do
+ transfer_service.execute(new_group)
+
+ expect(transfer_service.error).to be nil
+ expect(group.parent).to eq(new_group)
+ end
+ end
+
+ context 'when transferring a group into a root group' do
+ let(:new_group) { nil }
+
+ it_behaves_like 'transfer not allowed'
+ end
+ end
+ end
+
+ context 'without an npm package' do
+ context 'when transferring a group into a root group' do
+ let(:group) { create(:group, parent: create(:group)) }
+
+ it 'allows transfer' do
+ transfer_service.execute(nil)
+
+ expect(transfer_service.error).to be nil
+ expect(group.parent).to be_nil
+ end
+ end
+ end
+ end
+
shared_examples 'ensuring allowed transfer for a group' do
context "when there's an exception on GitLab shell directories" do
let(:new_parent_group) { create(:group, :public) }
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 25c79d9e600..1e6a8d53354 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -9,6 +9,50 @@ RSpec.describe Groups::UpdateService do
let!(:public_group) { create(:group, :public) }
describe "#execute" do
+ shared_examples 'with packages' do
+ before do
+ group.add_owner(user)
+ end
+
+ context 'with npm packages' do
+ let!(:package) { create(:npm_package, project: project) }
+
+ it 'does not allow a path update' do
+ expect(update_group(group, user, path: 'updated')).to be false
+ expect(group.errors[:path]).to include('cannot change when group contains projects with NPM packages')
+ end
+
+ it 'allows name update' do
+ expect(update_group(group, user, name: 'Updated')).to be true
+ expect(group.errors).to be_empty
+ expect(group.name).to eq('Updated')
+ end
+ end
+ end
+
+ context 'with project' do
+ let!(:group) { create(:group, :public) }
+ let(:project) { create(:project, namespace: group) }
+
+ it_behaves_like 'with packages'
+
+ context 'located in a subgroup' do
+ let(:subgroup) { create(:group, parent: group) }
+ let!(:project) { create(:project, namespace: subgroup) }
+
+ before do
+ subgroup.add_owner(user)
+ end
+
+ it_behaves_like 'with packages'
+
+ it 'allows a path update when there is no root namespace change' do
+ expect(update_group(subgroup, user, path: 'updated')).to be true
+ expect(subgroup.errors[:path]).to be_empty
+ end
+ end
+ end
+
context "project visibility_level validation" do
context "public group with public projects" do
let!(:service) { described_class.new(public_group, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
@@ -238,4 +282,8 @@ RSpec.describe Groups::UpdateService do
end
end
end
+
+ def update_group(group, user, opts)
+ Groups::UpdateService.new(group, user, opts).execute
+ end
end
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 266ff309662..408d7767254 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe Import::GithubService do
let_it_be(:user) { create(:user) }
let_it_be(:token) { 'complex-token' }
let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
- let_it_be(:client) { Gitlab::LegacyGithubImport::Client.new(token) }
let_it_be(:params) { { repo_id: 123, new_name: 'new_repo', target_namespace: 'root' } }
let(:subject) { described_class.new(client, user, params) }
@@ -15,41 +14,61 @@ RSpec.describe Import::GithubService do
allow(subject).to receive(:authorized?).and_return(true)
end
- context 'do not raise an exception on input error' do
- let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
+ shared_examples 'handles errors' do |klass|
+ let(:client) { klass.new(token) }
- before do
- expect(client).to receive(:repo).and_raise(exception)
- end
+ context 'do not raise an exception on input error' do
+ let(:exception) { Octokit::ClientError.new(status: 404, body: 'Not Found') }
+
+ before do
+ expect(client).to receive(:repository).and_raise(exception)
+ end
- it 'logs the original error' do
- expect(Gitlab::Import::Logger).to receive(:error).with({
- message: 'Import failed due to a GitHub error',
- status: 404,
- error: 'Not Found'
- }).and_call_original
+ it 'logs the original error' do
+ expect(Gitlab::Import::Logger).to receive(:error).with({
+ message: 'Import failed due to a GitHub error',
+ status: 404,
+ error: 'Not Found'
+ }).and_call_original
- subject.execute(access_params, :github)
+ subject.execute(access_params, :github)
+ end
+
+ it 'returns an error' do
+ result = subject.execute(access_params, :github)
+
+ expect(result).to include(
+ message: 'Import failed due to a GitHub error: Not Found',
+ status: :error,
+ http_status: :unprocessable_entity
+ )
+ end
end
- it 'returns an error' do
- result = subject.execute(access_params, :github)
+ it 'raises an exception for unknown error causes' do
+ exception = StandardError.new('Not Implemented')
+
+ expect(client).to receive(:repository).and_raise(exception)
- expect(result).to include(
- message: 'Import failed due to a GitHub error: Not Found',
- status: :error,
- http_status: :unprocessable_entity
- )
+ expect(Gitlab::Import::Logger).not_to receive(:error)
+
+ expect { subject.execute(access_params, :github) }.to raise_error(exception)
end
end
- it 'raises an exception for unknown error causes' do
- exception = StandardError.new('Not Implemented')
+ context 'when remove_legacy_github_client feature flag is enabled' do
+ before do
+ stub_feature_flags(remove_legacy_github_client: true)
+ end
- expect(client).to receive(:repo).and_raise(exception)
+ include_examples 'handles errors', Gitlab::GithubImport::Client
+ end
- expect(Gitlab::Import::Logger).not_to receive(:error)
+ context 'when remove_legacy_github_client feature flag is disabled' do
+ before do
+ stub_feature_flags(remove_legacy_github_client: false)
+ end
- expect { subject.execute(access_params, :github) }.to raise_error(exception)
+ include_examples 'handles errors', Gitlab::LegacyGithubImport::Client
end
end
diff --git a/spec/services/incident_management/create_incident_label_service_spec.rb b/spec/services/incident_management/create_incident_label_service_spec.rb
index 2f11bcf397e..18a7c019497 100644
--- a/spec/services/incident_management/create_incident_label_service_spec.rb
+++ b/spec/services/incident_management/create_incident_label_service_spec.rb
@@ -52,7 +52,15 @@ RSpec.describe IncidentManagement::CreateIncidentLabelService do
end
context 'without label' do
- it_behaves_like 'new label'
+ context 'when user has permissions to create labels' do
+ it_behaves_like 'new label'
+ end
+
+ context 'when user has no permissions to create labels' do
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'new label'
+ end
end
end
end
diff --git a/spec/services/incident_management/create_issue_service_spec.rb b/spec/services/incident_management/create_issue_service_spec.rb
deleted file mode 100644
index dab9a149458..00000000000
--- a/spec/services/incident_management/create_issue_service_spec.rb
+++ /dev/null
@@ -1,239 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe IncidentManagement::CreateIssueService do
- let(:project) { create(:project, :repository, :private) }
- let_it_be(:user) { User.alert_bot }
- let(:service) { described_class.new(project, alert_payload) }
- let(:alert_starts_at) { Time.current }
- let(:alert_title) { 'TITLE' }
- let(:alert_annotations) { { title: alert_title } }
-
- let(:alert_payload) do
- build_alert_payload(
- annotations: alert_annotations,
- starts_at: alert_starts_at
- )
- end
-
- let(:alert_presenter) do
- Gitlab::Alerting::Alert.new(project: project, payload: alert_payload).present
- end
-
- let!(:setting) do
- create(:project_incident_management_setting, project: project)
- end
-
- subject { service.execute }
-
- context 'when create_issue enabled' do
- let(:issue) { subject[:issue] }
-
- before do
- setting.update!(create_issue: true)
- end
-
- context 'without issue_template_content' do
- it 'creates an issue with alert summary only' do
- expect(subject).to include(status: :success)
-
- expect(issue.author).to eq(user)
- expect(issue.title).to eq(alert_title)
- expect(issue.description).to include(alert_presenter.issue_summary_markdown.strip)
- expect(separator_count(issue.description)).to eq(0)
- end
- end
-
- context 'with erroneous issue service' do
- let(:invalid_issue) do
- build(:issue, project: project, title: nil).tap(&:valid?)
- end
-
- let(:issue_error) { invalid_issue.errors.full_messages.to_sentence }
-
- it 'returns and logs the issue error' do
- expect_next_instance_of(Issues::CreateService) do |issue_service|
- expect(issue_service).to receive(:execute).and_return(invalid_issue)
- end
-
- expect(service)
- .to receive(:log_error)
- .with(error_message(issue_error))
-
- expect(subject).to include(status: :error, message: issue_error)
- end
- end
-
- shared_examples 'GFM template' do
- context 'plain content' do
- let(:template_content) { 'some content' }
-
- it 'creates an issue appending issue template' do
- expect(subject).to include(status: :success)
-
- expect(issue.description).to include(alert_presenter.issue_summary_markdown)
- expect(separator_count(issue.description)).to eq(1)
- expect(issue.description).to include(template_content)
- end
- end
-
- context 'quick actions' do
- let(:user) { create(:user) }
- let(:plain_text) { 'some content' }
-
- let(:template_content) do
- <<~CONTENT
- #{plain_text}
- /due tomorrow
- /assign @#{user.username}
- CONTENT
- end
-
- before do
- project.add_maintainer(user)
- end
-
- it 'creates an issue interpreting quick actions' do
- expect(subject).to include(status: :success)
-
- expect(issue.description).to include(plain_text)
- expect(issue.due_date).to be_present
- expect(issue.assignees).to eq([user])
- end
- end
- end
-
- context 'with gitlab_incident_markdown' do
- let(:alert_annotations) do
- { title: alert_title, gitlab_incident_markdown: template_content }
- end
-
- it_behaves_like 'GFM template'
- end
-
- context 'with issue_template_content' do
- before do
- create_issue_template('bug', template_content)
- setting.update!(issue_template_key: 'bug')
- end
-
- it_behaves_like 'GFM template'
-
- context 'and gitlab_incident_markdown' do
- let(:template_content) { 'plain text'}
- let(:alt_template) { 'alternate text' }
- let(:alert_annotations) do
- { title: alert_title, gitlab_incident_markdown: alt_template }
- end
-
- it 'includes both templates' do
- expect(subject).to include(status: :success)
-
- expect(issue.description).to include(alert_presenter.issue_summary_markdown)
- expect(issue.description).to include(template_content)
- expect(issue.description).to include(alt_template)
- expect(separator_count(issue.description)).to eq(2)
- end
- end
-
- private
-
- def create_issue_template(name, content)
- project.repository.create_file(
- project.creator,
- ".gitlab/issue_templates/#{name}.md",
- content,
- message: 'message',
- branch_name: 'master'
- )
- end
- end
-
- context 'with gitlab alert' do
- let(:gitlab_alert) { create(:prometheus_alert, project: project) }
-
- before do
- alert_payload['labels'] = {
- 'gitlab_alert_id' => gitlab_alert.prometheus_metric_id.to_s
- }
- end
-
- it 'creates an issue' do
- query_title = "#{gitlab_alert.title} #{gitlab_alert.computed_operator} #{gitlab_alert.threshold}"
-
- expect(subject).to include(status: :success)
-
- expect(issue.author).to eq(user)
- expect(issue.title).to eq(alert_presenter.full_title)
- expect(issue.title).to include(gitlab_alert.environment.name)
- expect(issue.title).to include(query_title)
- expect(issue.title).to include('for 5 minutes')
- expect(issue.description).to include(alert_presenter.issue_summary_markdown.strip)
- expect(separator_count(issue.description)).to eq(0)
- end
- end
-
- describe 'with invalid alert payload' do
- shared_examples 'invalid alert' do
- it 'does not create an issue' do
- expect(service)
- .to receive(:log_error)
- .with(error_message('invalid alert'))
-
- expect(subject).to eq(status: :error, message: 'invalid alert')
- end
- end
-
- context 'without title' do
- let(:alert_annotations) { {} }
-
- it_behaves_like 'invalid alert'
- end
-
- context 'without startsAt' do
- let(:alert_starts_at) { nil }
-
- it_behaves_like 'invalid alert'
- end
- end
-
- describe "label `incident`" do
- it_behaves_like 'create alert issue sets issue labels'
- end
- end
-
- context 'when create_issue disabled' do
- before do
- setting.update!(create_issue: false)
- end
-
- it 'returns an error' do
- expect(service)
- .to receive(:log_error)
- .with(error_message('setting disabled'))
-
- expect(subject).to eq(status: :error, message: 'setting disabled')
- end
- end
-
- private
-
- def build_alert_payload(annotations: {}, starts_at: Time.current)
- {
- 'annotations' => annotations.stringify_keys
- }.tap do |payload|
- payload['startsAt'] = starts_at.rfc3339 if starts_at
- end
- end
-
- def error_message(message)
- %{Cannot create incident issue for "#{project.full_name}": #{message}}
- end
-
- def separator_count(text)
- summary_separator = "\n\n---\n\n"
-
- text.scan(summary_separator).size
- end
-end
diff --git a/spec/services/incident_management/incidents/create_service_spec.rb b/spec/services/incident_management/incidents/create_service_spec.rb
new file mode 100644
index 00000000000..404c428cd94
--- /dev/null
+++ b/spec/services/incident_management/incidents/create_service_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe IncidentManagement::Incidents::CreateService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { User.alert_bot }
+ let(:description) { 'Incident description' }
+
+ describe '#execute' do
+ subject(:create_incident) { described_class.new(project, user, title: title, description: description).execute }
+
+ context 'when incident has title and description' do
+ let(:title) { 'Incident title' }
+ let(:new_issue) { Issue.last! }
+ let(:label_title) { IncidentManagement::CreateIncidentLabelService::LABEL_PROPERTIES[:title] }
+
+ it 'responds with success' do
+ expect(create_incident).to be_success
+ end
+
+ it 'creates an incident issue' do
+ expect { create_incident }.to change(Issue, :count).by(1)
+ end
+
+ it 'created issue has correct attributes' do
+ create_incident
+ aggregate_failures do
+ expect(new_issue.title).to eq(title)
+ expect(new_issue.description).to eq(description)
+ expect(new_issue.author).to eq(user)
+ expect(new_issue.issue_type).to eq('incident')
+ expect(new_issue.labels.map(&:title)).to eq([label_title])
+ end
+ end
+
+ context 'when incident label does not exist' do
+ it 'creates incident label' do
+ expect { create_incident }.to change { project.labels.where(title: label_title).count }.by(1)
+ end
+ end
+
+ context 'when incident label already exists' do
+ let!(:label) { create(:label, project: project, title: label_title) }
+
+ it 'does not create new labels' do
+ expect { create_incident }.not_to change(Label, :count)
+ end
+ end
+ end
+
+ context 'when incident has no title' do
+ let(:title) { '' }
+
+ it 'does not create an issue' do
+ expect { create_incident }.not_to change(Issue, :count)
+ end
+
+ it 'responds with errors' do
+ expect(create_incident).to be_error
+ expect(create_incident.message).to eq("Title can't be blank")
+ end
+
+ it 'result payload contains an Issue object' do
+ expect(create_incident.payload[:issue]).to be_kind_of(Issue)
+ end
+ end
+ end
+end
diff --git a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
index cf43ed2411d..73ad0532e07 100644
--- a/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/create_incident_issue_service_spec.rb
@@ -12,84 +12,63 @@ RSpec.describe IncidentManagement::PagerDuty::CreateIncidentIssueService do
subject(:execute) { described_class.new(project, incident_payload).execute }
describe '#execute' do
- context 'when pagerduty_webhook feature enabled' do
- before do
- stub_feature_flags(pagerduty_webhook: project)
- end
+ context 'when PagerDuty webhook setting is active' do
+ let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
- context 'when PagerDuty webhook setting is active' do
- let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
-
- context 'when issue can be created' do
- it 'creates a new issue' do
- expect { execute }.to change(Issue, :count).by(1)
- end
-
- it 'responds with success' do
- response = execute
-
- expect(response).to be_success
- expect(response.payload[:issue]).to be_kind_of(Issue)
- end
-
- it 'the issue author is Alert bot' do
- expect(execute.payload[:issue].author).to eq(User.alert_bot)
- end
-
- it 'issue has a correct title' do
- expect(execute.payload[:issue].title).to eq(incident_payload['title'])
- end
-
- it 'issue has a correct description' do
- markdown_line_break = ' '
-
- expect(execute.payload[:issue].description).to eq(
- <<~MARKDOWN.chomp
- **Incident:** [My new incident](https://webdemo.pagerduty.com/incidents/PRORDTY)#{markdown_line_break}
- **Incident number:** 33#{markdown_line_break}
- **Urgency:** high#{markdown_line_break}
- **Status:** triggered#{markdown_line_break}
- **Incident key:** #{markdown_line_break}
- **Created at:** 26 September 2017, 3:14PM (UTC)#{markdown_line_break}
- **Assignees:** [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV)#{markdown_line_break}
- **Impacted services:** [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75)
- MARKDOWN
- )
- end
+ context 'when issue can be created' do
+ it 'creates a new issue' do
+ expect { execute }.to change(Issue, :count).by(1)
end
- context 'when the payload does not contain a title' do
- let(:incident_payload) { {} }
+ it 'responds with success' do
+ response = execute
+
+ expect(response).to be_success
+ expect(response.payload[:issue]).to be_kind_of(Issue)
+ end
- it 'does not create a GitLab issue' do
- expect { execute }.not_to change(Issue, :count)
- end
+ it 'the issue author is Alert bot' do
+ expect(execute.payload[:issue].author).to eq(User.alert_bot)
+ end
- it 'responds with error' do
- expect(execute).to be_error
- expect(execute.message).to eq("Title can't be blank")
- end
+ it 'issue has a correct title' do
+ expect(execute.payload[:issue].title).to eq(incident_payload['title'])
+ end
+
+ it 'issue has a correct description' do
+ markdown_line_break = ' '
+
+ expect(execute.payload[:issue].description).to eq(
+ <<~MARKDOWN.chomp
+ **Incident:** [My new incident](https://webdemo.pagerduty.com/incidents/PRORDTY)#{markdown_line_break}
+ **Incident number:** 33#{markdown_line_break}
+ **Urgency:** high#{markdown_line_break}
+ **Status:** triggered#{markdown_line_break}
+ **Incident key:** #{markdown_line_break}
+ **Created at:** 26 September 2017, 3:14PM (UTC)#{markdown_line_break}
+ **Assignees:** [Laura Haley](https://webdemo.pagerduty.com/users/P553OPV)#{markdown_line_break}
+ **Impacted services:** [Production XDB Cluster](https://webdemo.pagerduty.com/services/PN49J75)
+ MARKDOWN
+ )
end
end
- context 'when PagerDuty webhook setting is not active' do
- let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
+ context 'when the payload does not contain a title' do
+ let(:incident_payload) { {} }
it 'does not create a GitLab issue' do
expect { execute }.not_to change(Issue, :count)
end
- it 'responds with forbidden' do
+ it 'responds with error' do
expect(execute).to be_error
- expect(execute.http_status).to eq(:forbidden)
+ expect(execute.message).to eq("Title can't be blank")
end
end
end
- context 'when pagerduty_webhook feature disabled' do
- before do
- stub_feature_flags(pagerduty_webhook: false)
- end
+ context 'when PagerDuty webhook setting is not active' do
+ let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
it 'does not create a GitLab issue' do
expect { execute }.not_to change(Issue, :count)
diff --git a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
index 11ce8388427..4c8aebe5fe2 100644
--- a/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
+++ b/spec/services/incident_management/pager_duty/process_webhook_service_spec.rb
@@ -19,92 +19,68 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessWebhookService do
subject(:execute) { described_class.new(project, nil, webhook_payload).execute(token) }
- context 'when pagerduty_webhook feature is enabled' do
- before do
- stub_feature_flags(pagerduty_webhook: project)
- end
-
- context 'when PagerDuty webhook setting is active' do
- let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
-
- context 'when token is valid' do
- let(:token) { incident_management_setting.pagerduty_token }
+ context 'when PagerDuty webhook setting is active' do
+ let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: true) }
- context 'when webhook payload has acceptable size' do
- it 'responds with Accepted' do
- result = execute
+ context 'when token is valid' do
+ let(:token) { incident_management_setting.pagerduty_token }
- expect(result).to be_success
- expect(result.http_status).to eq(:accepted)
- end
-
- it 'processes issues' do
- incident_payload = ::PagerDuty::WebhookPayloadParser.call(webhook_payload).first['incident']
-
- expect(::IncidentManagement::PagerDuty::ProcessIncidentWorker)
- .to receive(:perform_async)
- .with(project.id, incident_payload)
- .once
+ context 'when webhook payload has acceptable size' do
+ it 'responds with Accepted' do
+ result = execute
- execute
- end
+ expect(result).to be_success
+ expect(result.http_status).to eq(:accepted)
end
- context 'when webhook payload is too big' do
- let(:deep_size) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
-
- before do
- allow(Gitlab::Utils::DeepSize)
- .to receive(:new)
- .with(webhook_payload, max_size: described_class::PAGER_DUTY_PAYLOAD_SIZE_LIMIT)
- .and_return(deep_size)
- end
+ it 'processes issues' do
+ incident_payload = ::PagerDuty::WebhookPayloadParser.call(webhook_payload).first['incident']
- it 'responds with Bad Request' do
- result = execute
+ expect(::IncidentManagement::PagerDuty::ProcessIncidentWorker)
+ .to receive(:perform_async)
+ .with(project.id, incident_payload)
+ .once
- expect(result).to be_error
- expect(result.http_status).to eq(:bad_request)
- end
-
- it_behaves_like 'does not process incidents'
+ execute
end
+ end
- context 'when webhook payload is blank' do
- let(:webhook_payload) { nil }
+ context 'when webhook payload is too big' do
+ let(:deep_size) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
- it 'responds with Accepted' do
- result = execute
+ before do
+ allow(Gitlab::Utils::DeepSize)
+ .to receive(:new)
+ .with(webhook_payload, max_size: described_class::PAGER_DUTY_PAYLOAD_SIZE_LIMIT)
+ .and_return(deep_size)
+ end
- expect(result).to be_success
- expect(result.http_status).to eq(:accepted)
- end
+ it 'responds with Bad Request' do
+ result = execute
- it_behaves_like 'does not process incidents'
+ expect(result).to be_error
+ expect(result.http_status).to eq(:bad_request)
end
+
+ it_behaves_like 'does not process incidents'
end
- context 'when token is invalid' do
- let(:token) { 'invalid-token' }
+ context 'when webhook payload is blank' do
+ let(:webhook_payload) { nil }
- it 'responds with Unauthorized' do
+ it 'responds with Accepted' do
result = execute
- expect(result).to be_error
- expect(result.http_status).to eq(:unauthorized)
+ expect(result).to be_success
+ expect(result.http_status).to eq(:accepted)
end
it_behaves_like 'does not process incidents'
end
end
- context 'when both tokens are nil' do
- let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
- let(:token) { nil }
-
- before do
- incident_management_setting.update_column(:pagerduty_active, true)
- end
+ context 'when token is invalid' do
+ let(:token) { 'invalid-token' }
it 'responds with Unauthorized' do
result = execute
@@ -115,25 +91,28 @@ RSpec.describe IncidentManagement::PagerDuty::ProcessWebhookService do
it_behaves_like 'does not process incidents'
end
+ end
- context 'when PagerDuty webhook setting is not active' do
- let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
+ context 'when both tokens are nil' do
+ let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
+ let(:token) { nil }
- it 'responds with Forbidden' do
- result = execute
+ before do
+ incident_management_setting.update_column(:pagerduty_active, true)
+ end
- expect(result).to be_error
- expect(result.http_status).to eq(:forbidden)
- end
+ it 'responds with Unauthorized' do
+ result = execute
- it_behaves_like 'does not process incidents'
+ expect(result).to be_error
+ expect(result.http_status).to eq(:unauthorized)
end
+
+ it_behaves_like 'does not process incidents'
end
- context 'when pagerduty_webhook feature is disabled' do
- before do
- stub_feature_flags(pagerduty_webhook: false)
- end
+ context 'when PagerDuty webhook setting is not active' do
+ let_it_be(:incident_management_setting) { create(:project_incident_management_setting, project: project, pagerduty_active: false) }
it 'responds with Forbidden' do
result = execute
diff --git a/spec/services/issuable/clone/content_rewriter_spec.rb b/spec/services/issuable/clone/content_rewriter_spec.rb
deleted file mode 100644
index f39439b7c2f..00000000000
--- a/spec/services/issuable/clone/content_rewriter_spec.rb
+++ /dev/null
@@ -1,182 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Issuable::Clone::ContentRewriter do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project1) { create(:project, :public, group: group) }
- let(:project2) { create(:project, :public, group: group) }
-
- let(:other_issue) { create(:issue, project: project1) }
- let(:merge_request) { create(:merge_request) }
-
- subject { described_class.new(user, original_issue, new_issue)}
-
- let(:description) { 'Simple text' }
- let(:original_issue) { create(:issue, description: description, project: project1) }
- let(:new_issue) { create(:issue, project: project2) }
-
- context 'rewriting award emojis' do
- it 'copies the award emojis' do
- create(:award_emoji, awardable: original_issue, name: 'thumbsup')
- create(:award_emoji, awardable: original_issue, name: 'thumbsdown')
-
- expect { subject.execute }.to change { AwardEmoji.count }.by(2)
-
- expect(new_issue.award_emoji.map(&:name)).to match_array(%w(thumbsup thumbsdown))
- end
- end
-
- context 'rewriting description' do
- before do
- subject.execute
- end
-
- context 'when description is a simple text' do
- it 'does not rewrite the description' do
- expect(new_issue.reload.description).to eq(original_issue.description)
- end
- end
-
- context 'when description contains a local reference' do
- let(:description) { "See ##{other_issue.iid}" }
-
- it 'rewrites the local reference correctly' do
- expected_description = "See #{project1.path}##{other_issue.iid}"
-
- expect(new_issue.reload.description).to eq(expected_description)
- end
- end
-
- context 'when description contains a cross reference' do
- let(:description) { "See #{merge_request.project.full_path}!#{merge_request.iid}" }
-
- it 'rewrites the cross reference correctly' do
- expected_description = "See #{merge_request.project.full_path}!#{merge_request.iid}"
-
- expect(new_issue.reload.description).to eq(expected_description)
- end
- end
-
- context 'when description contains a user reference' do
- let(:description) { "FYU #{user.to_reference}" }
-
- it 'works with a user reference' do
- expect(new_issue.reload.description).to eq("FYU #{user.to_reference}")
- end
- end
-
- context 'when description contains uploads' do
- let(:uploader) { build(:file_uploader, project: project1) }
- let(:description) { "Text and #{uploader.markdown_link}" }
-
- it 'rewrites uploads in the description' do
- upload = Upload.last
-
- expect(new_issue.description).not_to eq(description)
- expect(new_issue.description).to match(/Text and #{FileUploader::MARKDOWN_PATTERN}/)
- expect(upload.secret).not_to eq(uploader.secret)
- expect(new_issue.description).to include(upload.secret)
- expect(new_issue.description).to include(upload.path)
- end
- end
- end
-
- context 'rewriting notes' do
- context 'simple notes' do
- let!(:notes) do
- [
- create(:note, noteable: original_issue, project: project1,
- created_at: 2.weeks.ago, updated_at: 1.week.ago),
- create(:note, noteable: original_issue, project: project1),
- create(:note, system: true, noteable: original_issue, project: project1)
- ]
- end
- let!(:system_note_metadata) { create(:system_note_metadata, note: notes.last) }
- let!(:award_emoji) { create(:award_emoji, awardable: notes.first, name: 'thumbsup')}
-
- before do
- subject.execute
- end
-
- it 'rewrites existing notes in valid order' do
- expect(new_issue.notes.order('id ASC').pluck(:note).first(3)).to eq(notes.map(&:note))
- end
-
- it 'copies all the issue notes' do
- expect(new_issue.notes.count).to eq(3)
- end
-
- it 'does not change the note attributes' do
- subject.execute
-
- new_note = new_issue.notes.first
-
- expect(new_note.note).to eq(notes.first.note)
- expect(new_note.author).to eq(notes.first.author)
- end
-
- it 'copies the award emojis' do
- subject.execute
-
- new_note = new_issue.notes.first
- new_note.award_emoji.first.name = 'thumbsup'
- end
-
- it 'copies system_note_metadata for system note' do
- new_note = new_issue.notes.last
-
- expect(new_note.system_note_metadata.action).to eq(system_note_metadata.action)
- expect(new_note.system_note_metadata.id).not_to eq(system_note_metadata.id)
- end
- end
-
- context 'notes with reference' do
- let(:text) do
- "See ##{other_issue.iid} and #{merge_request.project.full_path}!#{merge_request.iid}"
- end
- let!(:note) { create(:note, noteable: original_issue, note: text, project: project1) }
-
- it 'rewrites the references correctly' do
- subject.execute
-
- new_note = new_issue.notes.first
-
- expected_text = "See #{other_issue.project.path}##{other_issue.iid} and #{merge_request.project.full_path}!#{merge_request.iid}"
-
- expect(new_note.note).to eq(expected_text)
- expect(new_note.author).to eq(note.author)
- end
- end
-
- context 'notes with upload' do
- let(:uploader) { build(:file_uploader, project: project1) }
- let(:text) { "Simple text with image: #{uploader.markdown_link} "}
- let!(:note) { create(:note, noteable: original_issue, note: text, project: project1) }
-
- it 'rewrites note content correctly' do
- subject.execute
- new_note = new_issue.notes.first
-
- expect(note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/)
- expect(new_note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/)
- expect(note.note).not_to eq(new_note.note)
- expect(note.note_html).not_to eq(new_note.note_html)
- end
- end
-
- context "discussion notes" do
- let(:note) { create(:note, noteable: original_issue, note: "sample note", project: project1) }
- let!(:discussion) { create(:discussion_note_on_issue, in_reply_to: note, note: "reply to sample note") }
-
- it 'rewrites discussion correctly' do
- subject.execute
-
- expect(new_issue.notes.count).to eq(original_issue.notes.count)
- expect(new_issue.notes.where(discussion_id: discussion.discussion_id).count).to eq(0)
- expect(original_issue.notes.where(discussion_id: discussion.discussion_id).count).to eq(1)
- end
- end
- end
-end
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 68b226b02da..93eef8a2732 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -147,5 +147,19 @@ RSpec.describe Issues::BuildService do
expect(issue.milestone).to be_nil
end
+
+ context 'setting issue type' do
+ it 'sets the issue_type on the issue' do
+ issue = build_issue(issue_type: 'incident')
+
+ expect(issue.issue_type).to eq('incident')
+ end
+
+ it 'defaults to issue if issue_type not given' do
+ issue = build_issue
+
+ expect(issue.issue_type).to eq('issue')
+ end
+ end
end
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index 6678d831775..7ca7d3be99c 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -252,6 +252,41 @@ RSpec.describe Issues::CloseService do
expect(todo.reload).to be_done
end
+ context 'when there is an associated Alert Management Alert' do
+ context 'when alert can be resolved' do
+ let!(:alert) { create(:alert_management_alert, issue: issue, project: project) }
+
+ it 'resolves an alert and sends a system note' do
+ expect_next_instance_of(SystemNotes::AlertManagementService) do |notes_service|
+ expect(notes_service).to receive(:closed_alert_issue).with(issue)
+ end
+
+ close_issue
+
+ expect(alert.reload.resolved?).to eq(true)
+ end
+ end
+
+ context 'when alert cannot be resolved' do
+ let!(:alert) { create(:alert_management_alert, :with_validation_errors, issue: issue, project: project) }
+
+ before do
+ allow(Gitlab::AppLogger).to receive(:warn).and_call_original
+ end
+
+ it 'writes a warning into the log' do
+ close_issue
+
+ expect(Gitlab::AppLogger).to have_received(:warn).with(
+ message: 'Cannot resolve an associated Alert Management alert',
+ issue_id: issue.id,
+ alert_id: alert.id,
+ alert_errors: { hosts: ['hosts array is over 255 chars'] }
+ )
+ end
+ end
+ end
+
it 'deletes milestone issue counters cache' do
issue.update(milestone: create(:milestone, project: project))
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 8929907a179..5f944d1213b 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -3,15 +3,15 @@
require 'spec_helper'
RSpec.describe Issues::MoveService do
- let(:user) { create(:user) }
- let(:author) { create(:user) }
- let(:title) { 'Some issue' }
- let(:description) { "Some issue description with mention to #{user.to_reference}" }
- let(:group) { create(:group, :private) }
- let(:sub_group_1) { create(:group, :private, parent: group) }
- let(:sub_group_2) { create(:group, :private, parent: group) }
- let(:old_project) { create(:project, namespace: sub_group_1) }
- let(:new_project) { create(:project, namespace: sub_group_2) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:title) { 'Some issue' }
+ let_it_be(:description) { "Some issue description with mention to #{user.to_reference}" }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:sub_group_1) { create(:group, :private, parent: group) }
+ let_it_be(:sub_group_2) { create(:group, :private, parent: group) }
+ let_it_be(:old_project) { create(:project, namespace: sub_group_1) }
+ let_it_be(:new_project) { create(:project, namespace: sub_group_2) }
let(:old_issue) do
create(:issue, title: title, description: description, project: old_project, author: author)
@@ -30,15 +30,10 @@ RSpec.describe Issues::MoveService do
describe '#execute' do
shared_context 'issue move executed' do
- let!(:award_emoji) { create(:award_emoji, awardable: old_issue) }
-
let!(:new_issue) { move_service.execute(old_issue, new_project) }
end
context 'issue movable' do
- let!(:note_with_mention) { create(:note, noteable: old_issue, author: author, project: old_project, note: "note with mention #{user.to_reference}") }
- let!(:note_with_no_mention) { create(:note, noteable: old_issue, author: author, project: old_project, note: "note without mention") }
-
include_context 'user can move issue'
context 'generic issue' do
@@ -48,11 +43,11 @@ RSpec.describe Issues::MoveService do
expect(new_issue.project).to eq new_project
end
- it 'rewrites issue title' do
+ it 'copies issue title' do
expect(new_issue.title).to eq title
end
- it 'rewrites issue description' do
+ it 'copies issue description' do
expect(new_issue.description).to eq description
end
@@ -93,23 +88,21 @@ RSpec.describe Issues::MoveService do
it 'preserves create time' do
expect(old_issue.created_at).to eq new_issue.created_at
end
+ end
- it 'moves the award emoji' do
- expect(old_issue.award_emoji.first.name).to eq new_issue.reload.award_emoji.first.name
- end
+ context 'issue with award emoji' do
+ let!(:award_emoji) { create(:award_emoji, awardable: old_issue) }
- context 'when issue has notes with mentions' do
- it 'saves user mentions with actual mentions for new issue' do
- expect(new_issue.user_mentions.find_by(note_id: nil).mentioned_users_ids).to match_array([user.id])
- expect(new_issue.user_mentions.where.not(note_id: nil).first.mentioned_users_ids).to match_array([user.id])
- expect(new_issue.user_mentions.where.not(note_id: nil).count).to eq 1
- expect(new_issue.user_mentions.count).to eq 2
- end
+ it 'copies the award emoji' do
+ old_issue.reload
+ new_issue = move_service.execute(old_issue, new_project)
+
+ expect(old_issue.award_emoji.first.name).to eq new_issue.reload.award_emoji.first.name
end
end
context 'issue with assignee' do
- let(:assignee) { create(:user) }
+ let_it_be(:assignee) { create(:user) }
before do
old_issue.assignees = [assignee]
@@ -154,6 +147,25 @@ RSpec.describe Issues::MoveService do
.not_to raise_error # Sidekiq::Worker::EnqueueFromTransactionError
end
end
+
+ # These tests verify that notes are copied. More thorough tests are in
+ # the unit test for Notes::CopyService.
+ context 'issue with notes' do
+ let!(:notes) do
+ [
+ create(:note, noteable: old_issue, project: old_project, created_at: 2.weeks.ago, updated_at: 1.week.ago),
+ create(:note, noteable: old_issue, project: old_project)
+ ]
+ end
+
+ let(:copied_notes) { new_issue.notes.limit(notes.size) } # Exclude the system note added by the copy itself
+
+ include_context 'issue move executed'
+
+ it 'copies existing notes in order' do
+ expect(copied_notes.order('id ASC').pluck(:note)).to eq(notes.map(&:note))
+ end
+ end
end
describe 'move permissions' do
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 77bd540e22f..42452e95f6b 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -840,27 +840,27 @@ RSpec.describe Issues::UpdateService, :mailer do
end
context 'real-time updates' do
- let(:update_params) { { assignee_ids: [user2.id] } }
+ using RSpec::Parameterized::TableSyntax
- context 'when broadcast_issue_updates is enabled' do
- before do
- stub_feature_flags(broadcast_issue_updates: true)
- end
-
- it 'broadcasts to the issues channel' do
- expect(IssuesChannel).to receive(:broadcast_to).with(issue, event: 'updated')
+ let(:update_params) { { assignee_ids: [user2.id] } }
- update_issue(update_params)
- end
+ where(:action_cable_in_app_enabled, :feature_flag_enabled, :should_broadcast) do
+ true | true | true
+ true | false | true
+ false | true | true
+ false | false | false
end
- context 'when broadcast_issue_updates is disabled' do
- before do
- stub_feature_flags(broadcast_issue_updates: false)
- end
+ with_them do
+ it 'broadcasts to the issues channel based on ActionCable and feature flag values' do
+ expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(action_cable_in_app_enabled)
+ stub_feature_flags(broadcast_issue_updates: feature_flag_enabled)
- it 'does not broadcast to the issues channel' do
- expect(IssuesChannel).not_to receive(:broadcast_to)
+ if should_broadcast
+ expect(IssuesChannel).to receive(:broadcast_to).with(issue, event: 'updated')
+ else
+ expect(IssuesChannel).not_to receive(:broadcast_to)
+ end
update_issue(update_params)
end
diff --git a/spec/services/jira/requests/projects/list_service_spec.rb b/spec/services/jira/requests/projects/list_service_spec.rb
index 51e67dd821d..b4db77f8104 100644
--- a/spec/services/jira/requests/projects/list_service_spec.rb
+++ b/spec/services/jira/requests/projects/list_service_spec.rb
@@ -66,16 +66,28 @@ RSpec.describe Jira::Requests::Projects::ListService do
context 'when the request returns values' do
before do
- expect(client).to receive(:get).and_return([{ "key" => 'project1' }, { "key" => 'project2' }])
+ expect(client).to receive(:get).and_return([{ 'key' => 'pr1', 'name' => 'First Project' }, { 'key' => 'pr2', 'name' => 'Second Project' }])
end
- it 'returns a paylod with jira projets' do
+ it 'returns a payload with Jira projects' do
payload = subject.payload
expect(subject.success?).to be_truthy
- expect(payload[:projects].map(&:key)).to eq(%w(project1 project2))
+ expect(payload[:projects].map(&:key)).to eq(%w(pr1 pr2))
expect(payload[:is_last]).to be_truthy
end
+
+ context 'when filtering projects by name' do
+ let(:params) { { query: 'first' } }
+
+ it 'returns a payload with Jira projects' do
+ payload = subject.payload
+
+ expect(subject.success?).to be_truthy
+ expect(payload[:projects].map(&:key)).to eq(%w(pr1))
+ expect(payload[:is_last]).to be_truthy
+ end
+ end
end
end
end
diff --git a/spec/services/jira_import/cloud_users_mapper_service_spec.rb b/spec/services/jira_import/cloud_users_mapper_service_spec.rb
new file mode 100644
index 00000000000..591f80f3efc
--- /dev/null
+++ b/spec/services/jira_import/cloud_users_mapper_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraImport::CloudUsersMapperService do
+ let(:start_at) { 7 }
+ let(:url) { "/rest/api/2/users?maxResults=50&startAt=#{start_at}" }
+ let(:jira_users) do
+ [
+ { 'accountId' => 'abcd', 'displayName' => 'user1' },
+ { 'accountId' => 'efg' },
+ { 'accountId' => 'hij', 'displayName' => 'user3', 'emailAddress' => 'user3@example.com' }
+ ]
+ end
+
+ describe '#execute' do
+ it_behaves_like 'mapping jira users'
+ end
+end
diff --git a/spec/services/jira_import/server_users_mapper_service_spec.rb b/spec/services/jira_import/server_users_mapper_service_spec.rb
new file mode 100644
index 00000000000..22cb0327cc5
--- /dev/null
+++ b/spec/services/jira_import/server_users_mapper_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe JiraImport::ServerUsersMapperService do
+ let(:start_at) { 7 }
+ let(:url) { "/rest/api/2/user/search?username=''&maxResults=50&startAt=#{start_at}" }
+ let(:jira_users) do
+ [
+ { 'key' => 'abcd', 'name' => 'user1' },
+ { 'key' => 'efg' },
+ { 'key' => 'hij', 'name' => 'user3', 'emailAddress' => 'user3@example.com' }
+ ]
+ end
+
+ describe '#execute' do
+ it_behaves_like 'mapping jira users'
+ end
+end
diff --git a/spec/services/jira_import/users_importer_spec.rb b/spec/services/jira_import/users_importer_spec.rb
index 64cdc70f612..efb303dab9f 100644
--- a/spec/services/jira_import/users_importer_spec.rb
+++ b/spec/services/jira_import/users_importer_spec.rb
@@ -14,6 +14,27 @@ RSpec.describe JiraImport::UsersImporter do
subject { importer.execute }
describe '#execute' do
+ let(:mapped_users) do
+ [
+ {
+ jira_account_id: 'acc1',
+ jira_display_name: 'user1',
+ jira_email: 'sample@jira.com',
+ gitlab_id: nil,
+ gitlab_username: nil,
+ gitlab_name: nil
+ },
+ {
+ jira_account_id: 'acc2',
+ jira_display_name: 'user2',
+ jira_email: nil,
+ gitlab_id: nil,
+ gitlab_username: nil,
+ gitlab_name: nil
+ }
+ ]
+ end
+
before do
stub_jira_service_test
project.add_maintainer(user)
@@ -25,53 +46,83 @@ RSpec.describe JiraImport::UsersImporter do
end
end
- context 'when Jira import is configured correctly' do
- let_it_be(:jira_service) { create(:jira_service, project: project, active: true) }
- let(:client) { double }
+ RSpec.shared_examples 'maps jira users to gitlab users' do
+ context 'when Jira import is configured correctly' do
+ let_it_be(:jira_service) { create(:jira_service, project: project, active: true) }
+ let(:client) { double }
- before do
- expect(importer).to receive(:client).and_return(client)
- end
-
- context 'when jira client raises an error' do
- it 'returns an error response' do
- expect(client).to receive(:get).and_raise(Timeout::Error)
-
- expect(subject.error?).to be_truthy
- expect(subject.message).to include('There was an error when communicating to Jira')
- end
- end
-
- context 'when jira client returns result' do
before do
- allow(client).to receive(:get).with('/rest/api/2/users?maxResults=50&startAt=7')
- .and_return(jira_users)
+ expect(importer).to receive(:client).at_least(1).and_return(client)
+ allow(client).to receive_message_chain(:ServerInfo, :all, :deploymentType).and_return(deployment_type)
end
- context 'when jira client returns an empty array' do
- let(:jira_users) { [] }
+ context 'when jira client raises an error' do
+ it 'returns an error response' do
+ expect(client).to receive(:get).and_raise(Timeout::Error)
- it 'retturns nil payload' do
- expect(subject.success?).to be_truthy
- expect(subject.payload).to be_nil
+ expect(subject.error?).to be_truthy
+ expect(subject.message).to include('There was an error when communicating to Jira')
end
end
- context 'when jira client returns an results' do
- let(:jira_users) { [{ 'name' => 'user1' }, { 'name' => 'user2' }] }
- let(:mapped_users) { [{ jira_display_name: 'user1', gitlab_id: 5 }] }
+ context 'when jira client returns result' do
+ context 'when jira client returns an empty array' do
+ let(:jira_users) { [] }
- before do
- expect(JiraImport::UsersMapper).to receive(:new).with(project, jira_users)
- .and_return(double(execute: mapped_users))
+ it 'returns an empty payload' do
+ expect(subject.success?).to be_truthy
+ expect(subject.payload).to be_empty
+ end
end
- it 'returns the mapped users' do
- expect(subject.success?).to be_truthy
- expect(subject.payload).to eq(mapped_users)
+ context 'when jira client returns results' do
+ it 'returns the mapped users' do
+ expect(subject.success?).to be_truthy
+ expect(subject.payload).to eq(mapped_users)
+ end
end
end
end
end
+
+ context 'when Jira instance is of Server deployment type' do
+ let(:deployment_type) { 'Server' }
+ let(:url) { "/rest/api/2/user/search?username=''&maxResults=50&startAt=#{start_at}" }
+ let(:jira_users) do
+ [
+ { 'key' => 'acc1', 'name' => 'user1', 'emailAddress' => 'sample@jira.com' },
+ { 'key' => 'acc2', 'name' => 'user2' }
+ ]
+ end
+
+ before do
+ allow_next_instance_of(JiraImport::ServerUsersMapperService) do |instance|
+ allow(instance).to receive(:client).and_return(client)
+ allow(client).to receive(:get).with(url).and_return(jira_users)
+ end
+ end
+
+ it_behaves_like 'maps jira users to gitlab users'
+ end
+
+ context 'when Jira instance is of Cloud deployment type' do
+ let(:deployment_type) { 'Cloud' }
+ let(:url) { "/rest/api/2/users?maxResults=50&startAt=#{start_at}" }
+ let(:jira_users) do
+ [
+ { 'accountId' => 'acc1', 'displayName' => 'user1', 'emailAddress' => 'sample@jira.com' },
+ { 'accountId' => 'acc2', 'displayName' => 'user2' }
+ ]
+ end
+
+ before do
+ allow_next_instance_of(JiraImport::CloudUsersMapperService) do |instance|
+ allow(instance).to receive(:client).and_return(client)
+ allow(client).to receive(:get).with(url).and_return(jira_users)
+ end
+ end
+
+ it_behaves_like 'maps jira users to gitlab users'
+ end
end
end
diff --git a/spec/services/jira_import/users_mapper_spec.rb b/spec/services/jira_import/users_mapper_spec.rb
deleted file mode 100644
index e5e8279a6fb..00000000000
--- a/spec/services/jira_import/users_mapper_spec.rb
+++ /dev/null
@@ -1,43 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe JiraImport::UsersMapper do
- let_it_be(:project) { create(:project) }
-
- subject { described_class.new(project, jira_users).execute }
-
- describe '#execute' do
- context 'jira_users is nil' do
- let(:jira_users) { nil }
-
- it 'returns an empty array' do
- expect(subject).to be_empty
- end
- end
-
- context 'when jira_users is present' do
- let(:jira_users) do
- [
- { 'accountId' => 'abcd', 'displayName' => 'user1' },
- { 'accountId' => 'efg' },
- { 'accountId' => 'hij', 'displayName' => 'user3', 'emailAddress' => 'user3@example.com' }
- ]
- end
-
- # TODO: now we only create an array in a proper format
- # mapping is tracked in https://gitlab.com/gitlab-org/gitlab/-/issues/219023
- let(:mapped_users) do
- [
- { jira_account_id: 'abcd', jira_display_name: 'user1', jira_email: nil, gitlab_id: nil, gitlab_username: nil, gitlab_name: nil },
- { jira_account_id: 'efg', jira_display_name: nil, jira_email: nil, gitlab_id: nil, gitlab_username: nil, gitlab_name: nil },
- { jira_account_id: 'hij', jira_display_name: 'user3', jira_email: 'user3@example.com', gitlab_id: nil, gitlab_username: nil, gitlab_name: nil }
- ]
- end
-
- it 'returns users mapped to Gitlab' do
- expect(subject).to eq(mapped_users)
- end
- end
- end
-end
diff --git a/spec/services/labels/available_labels_service_spec.rb b/spec/services/labels/available_labels_service_spec.rb
index 9912f2cf469..9ee0b80edcd 100644
--- a/spec/services/labels/available_labels_service_spec.rb
+++ b/spec/services/labels/available_labels_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Labels::AvailableLabelsService do
let(:other_project_label) { create(:label) }
let(:group_label) { create(:group_label, group: group) }
let(:other_group_label) { create(:group_label) }
- let(:labels) { [project_label, other_project_label, group_label, other_group_label] }
+ let!(:labels) { [project_label, other_project_label, group_label, other_group_label] }
describe '#find_or_create_by_titles' do
let(:label_titles) { labels.map(&:title).push('non existing title') }
@@ -88,5 +88,33 @@ RSpec.describe Labels::AvailableLabelsService do
expect(result).to match_array([group_label.id])
end
end
+
+ it 'accepts a single id parameter' do
+ result = described_class.new(user, project, label_id: project_label.id).filter_labels_ids_in_param(:label_id)
+
+ expect(result).to match_array([project_label.id])
+ end
+ end
+
+ describe '#available_labels' do
+ context 'when parent is a project' do
+ it 'returns only relevant labels' do
+ result = described_class.new(user, project, {}).available_labels
+
+ expect(result.count).to eq(2)
+ expect(result).to include(project_label, group_label)
+ expect(result).not_to include(other_project_label, other_group_label)
+ end
+ end
+
+ context 'when parent is a group' do
+ it 'returns only relevant labels' do
+ result = described_class.new(user, group, {}).available_labels
+
+ expect(result.count).to eq(1)
+ expect(result).to include(group_label)
+ expect(result).not_to include(project_label, other_project_label, other_group_label)
+ end
+ end
end
end
diff --git a/spec/services/markdown_content_rewriter_service_spec.rb b/spec/services/markdown_content_rewriter_service_spec.rb
new file mode 100644
index 00000000000..47332bec319
--- /dev/null
+++ b/spec/services/markdown_content_rewriter_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MarkdownContentRewriterService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:source_parent) { create(:project, :public) }
+ let_it_be(:target_parent) { create(:project, :public) }
+ let(:content) { 'My content' }
+
+ describe '#initialize' do
+ it 'raises an error if source_parent is not a Project' do
+ expect do
+ described_class.new(user, content, create(:group), target_parent)
+ end.to raise_error(ArgumentError, 'The rewriter classes require that `source_parent` is a `Project`')
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new(user, content, source_parent, target_parent).execute }
+
+ it 'calls the rewriter classes successfully', :aggregate_failures do
+ [Gitlab::Gfm::ReferenceRewriter, Gitlab::Gfm::UploadsRewriter].each do |rewriter_class|
+ service = double
+
+ expect(service).to receive(:rewrite).with(target_parent)
+ expect(rewriter_class).to receive(:new).and_return(service)
+ end
+
+ subject
+ end
+
+ # Perform simple integration-style tests for each rewriter class
+ # to prove they run correctly.
+ context 'when content contains a reference' do
+ let_it_be(:issue) { create(:issue, project: source_parent) }
+ let(:content) { "See ##{issue.iid}" }
+
+ it 'rewrites content' do
+ expect(subject).to eq("See #{source_parent.full_path}##{issue.iid}")
+ end
+ end
+
+ context 'when content contains an upload' do
+ let(:image_uploader) { build(:file_uploader, project: source_parent) }
+ let(:content) { "Text and #{image_uploader.markdown_link}" }
+
+ it 'rewrites content' do
+ new_content = subject
+
+ expect(new_content).not_to eq(content)
+ expect(new_content.length).to eq(content.length)
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/conflicts/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
index c4d50124ca9..0abc70f71b0 100644
--- a/spec/services/merge_requests/conflicts/resolve_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
@@ -205,6 +205,7 @@ RSpec.describe MergeRequests::Conflicts::ResolveService do
let(:resolver) do
MergeRequests::Conflicts::ListService.new(merge_request).conflicts.resolver
end
+
let(:regex_conflict) do
resolver.conflict_for_path(resolver.conflicts, 'files/ruby/regex.rb', 'files/ruby/regex.rb')
end
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index a8661f027e8..bb62e594e7a 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -347,6 +347,7 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
target_branch: 'master'
}
end
+
let(:issuable) { described_class.new(project, user, params).execute }
end
diff --git a/spec/services/merge_requests/ff_merge_service_spec.rb b/spec/services/merge_requests/ff_merge_service_spec.rb
index c3da02273a4..55856deeaca 100644
--- a/spec/services/merge_requests/ff_merge_service_spec.rb
+++ b/spec/services/merge_requests/ff_merge_service_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe MergeRequests::FfMergeService do
assignees: [user2],
author: create(:user))
end
+
let(:project) { merge_request.project }
let(:valid_merge_params) { { sha: merge_request.diff_head_sha } }
@@ -113,9 +114,16 @@ RSpec.describe MergeRequests::FfMergeService do
it 'logs and saves error if there is an PreReceiveError exception' do
error_message = 'error message'
+ raw_message = 'The truth is out there'
- allow(service).to receive(:repository).and_raise(Gitlab::Git::PreReceiveError, "GitLab: #{error_message}")
+ pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message, "GitLab: #{error_message}")
+ allow(service).to receive(:repository).and_raise(pre_receive_error)
allow(service).to receive(:execute_hooks)
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ pre_receive_error,
+ pre_receive_message: raw_message,
+ merge_request_id: merge_request.id
+ )
service.execute(merge_request)
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 55f92d6bd0a..85bcf4562b1 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -5,9 +5,10 @@ require 'spec_helper'
RSpec.describe MergeRequests::PushOptionsHandlerService do
include ProjectForksHelper
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
- let(:forked_project) { fork_project(project, user, repository: true) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
+ let_it_be(:forked_project) { fork_project(project, user, repository: true) }
+
let(:service) { described_class.new(project, user, changes, push_options) }
let(:source_branch) { 'fix' }
let(:target_branch) { 'feature' }
@@ -21,28 +22,14 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
- before do
- project.add_developer(user)
- end
-
shared_examples_for 'a service that can create a merge request' do
subject(:last_mr) { MergeRequest.last }
- it 'creates a merge request' do
- expect { service.execute }.to change { MergeRequest.count }.by(1)
- end
-
- it 'sets the correct target branch' do
+ it 'creates a merge request with the correct target branch and assigned user' do
branch = push_options[:target] || project.default_branch
- service.execute
-
+ expect { service.execute }.to change { MergeRequest.count }.by(1)
expect(last_mr.target_branch).to eq(branch)
- end
-
- it 'assigns the MR to the user' do
- service.execute
-
expect(last_mr.assignees).to contain_exactly(user)
end
@@ -54,15 +41,10 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
allow(forked_project).to receive(:empty_repo?).and_return(false)
end
- it 'sets the correct source project' do
+ it 'sets the correct source and target project' do
service.execute
expect(last_mr.source_project).to eq(forked_project)
- end
-
- it 'sets the correct target project' do
- service.execute
-
expect(last_mr.target_project).to eq(project)
end
end
@@ -746,6 +728,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
describe 'when MRs are not enabled' do
+ let(:project) { create(:project, :public, :repository).tap { |pr| pr.add_developer(user) } }
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
diff --git a/spec/services/merge_requests/pushed_branches_service_spec.rb b/spec/services/merge_requests/pushed_branches_service_spec.rb
index 6e9c77bd3b6..cd6af4c275e 100644
--- a/spec/services/merge_requests/pushed_branches_service_spec.rb
+++ b/spec/services/merge_requests/pushed_branches_service_spec.rb
@@ -8,19 +8,24 @@ RSpec.describe MergeRequests::PushedBranchesService do
context 'when branches pushed' do
let(:pushed_branches) do
- %w(branch1 branch2 extra1 extra2 extra3).map do |branch|
+ %w(branch1 branch2 closed-branch1 closed-branch2 extra1 extra2).map do |branch|
{ ref: "refs/heads/#{branch}" }
end
end
- it 'returns only branches which have a merge request' do
+ it 'returns only branches which have an open or closed merge request' do
create(:merge_request, source_branch: 'branch1', source_project: project)
create(:merge_request, source_branch: 'branch2', source_project: project)
create(:merge_request, target_branch: 'branch2', source_project: project)
- create(:merge_request, :closed, target_branch: 'extra1', source_project: project)
- create(:merge_request, source_branch: 'extra2')
-
- expect(service.execute).to contain_exactly('branch1', 'branch2')
+ create(:merge_request, :closed, target_branch: 'closed-branch1', source_project: project)
+ create(:merge_request, :closed, source_branch: 'closed-branch2', source_project: project)
+ create(:merge_request, source_branch: 'extra1')
+
+ expect(service.execute).to contain_exactly(
+ 'branch1',
+ 'branch2',
+ 'closed-branch2'
+ )
end
end
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index 2e525f2ed01..653fcf12a76 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe MergeRequests::RebaseService do
target_branch: 'master',
rebase_jid: rebase_jid
end
+
let(:project) { merge_request.project }
let(:repository) { project.repository.raw }
let(:skip_ci) { false }
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 18c4cef7087..0696e8a247f 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -555,11 +555,13 @@ RSpec.describe MergeRequests::RefreshService do
message: 'Test commit',
branch_name: 'master')
end
+
let!(:second_commit) do
@fork_project.repository.create_file(@user, 'test2.txt', 'More test data',
message: 'Second test commit',
branch_name: 'master')
end
+
let!(:forked_master_mr) do
create(:merge_request,
source_project: @fork_project,
@@ -567,6 +569,7 @@ RSpec.describe MergeRequests::RefreshService do
target_branch: 'master',
target_project: @project)
end
+
let(:force_push_commit) { @project.commit('feature').id }
it 'reloads a new diff for a push to the forked project' do
diff --git a/spec/services/merge_requests/squash_service_spec.rb b/spec/services/merge_requests/squash_service_spec.rb
index 1ec1dc0f6eb..acbd0a42fcd 100644
--- a/spec/services/merge_requests/squash_service_spec.rb
+++ b/spec/services/merge_requests/squash_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe MergeRequests::SquashService do
let(:squash_dir_path) do
File.join(Gitlab.config.shared.path, 'tmp/squash', repository.gl_repository, merge_request.id.to_s)
end
+
let(:merge_request_with_one_commit) do
create(:merge_request,
source_branch: 'feature', source_project: project,
diff --git a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
index 4a226fe386c..728b343b801 100644
--- a/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/clone_dashboard_service_spec.rb
@@ -84,14 +84,12 @@ RSpec.describe Metrics::Dashboard::CloneDashboardService, :use_clean_rails_memor
it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH,
[
::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::Sorter
+ ::Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter
]
it_behaves_like 'valid dashboard cloning process', ::Metrics::Dashboard::ClusterDashboardService::DASHBOARD_PATH,
[
- ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::Sorter
+ ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter
]
it_behaves_like 'valid dashboard cloning process',
diff --git a/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb b/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
index e80911d6265..dbb89af45d0 100644
--- a/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/cluster_metrics_embed_service_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Metrics::Dashboard::ClusterMetricsEmbedService, :use_clean_rails_
}
]
end
+
let(:service_call) { described_class.new(*service_params).get_dashboard }
let(:panel_groups) { service_call[:dashboard][:panel_groups] }
let(:panel) { panel_groups.first[:panels].first }
diff --git a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
index d4391ecb6b9..aea9c25d104 100644
--- a/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_dashboard_service_spec.rb
@@ -57,6 +57,16 @@ RSpec.describe Metrics::Dashboard::CustomDashboardService, :use_clean_rails_memo
described_class.new(*service_params).get_dashboard
end
+ it 'tracks panel type' do
+ allow(::Gitlab::Tracking).to receive(:event).and_call_original
+
+ described_class.new(*service_params).get_dashboard
+
+ expect(::Gitlab::Tracking).to have_received(:event)
+ .with('MetricsDashboard::Chart', 'chart_rendered', { label: 'area-chart' })
+ .at_least(:once)
+ end
+
context 'and the dashboard is then deleted' do
it 'does not return the previously cached dashboard' do
described_class.new(*service_params).get_dashboard
@@ -104,6 +114,16 @@ RSpec.describe Metrics::Dashboard::CustomDashboardService, :use_clean_rails_memo
}]
)
end
+
+ it 'caches repo file list' do
+ expect(Gitlab::Metrics::Dashboard::RepoDashboardFinder).to receive(:list_dashboards)
+ .with(project)
+ .once
+ .and_call_original
+
+ described_class.all_dashboard_paths(project)
+ described_class.all_dashboard_paths(project)
+ end
end
end
diff --git a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
index a5f7c2ab8ab..82321dbc822 100644
--- a/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/custom_metric_embed_service_spec.rb
@@ -111,7 +111,8 @@ RSpec.describe Metrics::Dashboard::CustomMetricEmbedService do
it_behaves_like 'valid embedded dashboard service response'
it 'does not cache the unprocessed dashboard' do
- expect(Gitlab::Metrics::Dashboard::Cache).not_to receive(:fetch)
+ # Fail spec if any method of Cache class is called.
+ stub_const('Gitlab::Metrics::Dashboard::Cache', double)
described_class.new(*service_params).get_dashboard
end
diff --git a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
index 72b356be60f..3c533b0c464 100644
--- a/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/dynamic_embed_service_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe Metrics::Dashboard::DynamicEmbedService, :use_clean_rails_memory_
end
shared_examples 'uses system dashboard' do
- it 'uses the default dashboard' do
+ it 'uses the overview dashboard' do
expect(Gitlab::Metrics::Dashboard::Finder)
.to receive(:find_raw)
.with(project, dashboard_path: system_dashboard_path)
diff --git a/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb b/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
index 29c941826b5..dd9d498e307 100644
--- a/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/gitlab_alert_embed_service_spec.rb
@@ -72,12 +72,18 @@ RSpec.describe Metrics::Dashboard::GitlabAlertEmbedService do
it_behaves_like 'valid embedded dashboard service response'
it_behaves_like 'raises error for users with insufficient permissions'
- it 'uses the metric info corresponding to the alert' do
+ it 'generates a panel based on the alert' do
result = service_call
- metrics = result[:dashboard][:panel_groups][0][:panels][0][:metrics]
-
- expect(metrics.length).to eq 1
- expect(metrics.first[:metric_id]).to eq alert.prometheus_metric_id
+ panel = result[:dashboard][:panel_groups][0][:panels][0]
+ metric = panel[:metrics].first
+
+ expect(panel[:metrics].length).to eq 1
+ expect(panel).to include(
+ title: alert.prometheus_metric.title,
+ y_label: alert.prometheus_metric.y_label,
+ type: 'area-chart'
+ )
+ expect(metric[:metric_id]).to eq alert.prometheus_metric_id
end
context 'when the metric does not exist' do
@@ -87,7 +93,8 @@ RSpec.describe Metrics::Dashboard::GitlabAlertEmbedService do
end
it 'does not cache the unprocessed dashboard' do
- expect(Gitlab::Metrics::Dashboard::Cache).not_to receive(:fetch)
+ # Fail spec if any method of Cache class is called.
+ stub_const('Gitlab::Metrics::Dashboard::Cache', double)
described_class.new(*service_params).get_dashboard
end
diff --git a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
index ee3c55cb642..5263fd40a40 100644
--- a/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
+++ b/spec/services/metrics/dashboard/grafana_metric_embed_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Metrics::Dashboard::GrafanaMetricEmbedService do
include ReactiveCachingHelpers
include GrafanaApiHelpers
- let_it_be(:project) { build(:project) }
+ let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
@@ -15,7 +15,7 @@ RSpec.describe Metrics::Dashboard::GrafanaMetricEmbedService do
valid_grafana_dashboard_link(grafana_integration.grafana_url)
end
- before do
+ before_all do
project.add_maintainer(user)
end
@@ -58,6 +58,31 @@ RSpec.describe Metrics::Dashboard::GrafanaMetricEmbedService do
expect(subject.current_user).to eq(user)
expect(subject.params[:grafana_url]).to eq(grafana_url)
end
+
+ context 'with unknown users' do
+ let(:params) { [project.id, current_user_id, grafana_url] }
+
+ context 'when anonymous' do
+ where(:current_user_id) do
+ [nil, '']
+ end
+
+ with_them do
+ it 'sets current_user as nil' do
+ expect(subject.current_user).to be_nil
+ end
+ end
+ end
+
+ context 'when invalid' do
+ let(:current_user_id) { non_existing_record_id }
+
+ it 'raises a record not found error' do
+ expect { subject }
+ .to raise_error(ActiveRecord::RecordNotFound, /Couldn't find User/)
+ end
+ end
+ end
end
describe '#get_dashboard', :use_clean_rails_memory_store_caching do
@@ -145,7 +170,17 @@ RSpec.describe Metrics::Dashboard::GrafanaMetricEmbedService do
stub_datasource_request(grafana_integration.grafana_url)
end
- it_behaves_like 'valid embedded dashboard service response'
+ context 'when project is private and user is member' do
+ it_behaves_like 'valid embedded dashboard service response'
+ end
+
+ context 'when project is public and user is anonymous' do
+ let(:project) { create(:project, :public) }
+ let(:user) { nil }
+ let(:grafana_integration) { create(:grafana_integration, project: project) }
+
+ it_behaves_like 'valid embedded dashboard service response'
+ end
end
end
diff --git a/spec/services/metrics/dashboard/panel_preview_service_spec.rb b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
new file mode 100644
index 00000000000..d58dee3e7a3
--- /dev/null
+++ b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Metrics::Dashboard::PanelPreviewService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let_it_be(:panel_yml) do
+ <<~YML
+ ---
+ title: test panel
+ YML
+ end
+ let_it_be(:dashboard) do
+ {
+ panel_groups: [
+ {
+ panels: [{ 'title' => 'test panel' }]
+ }
+ ]
+ }
+ end
+
+ describe '#execute' do
+ subject(:service_response) { described_class.new(project, panel_yml, environment).execute }
+
+ context "valid panel's yaml" do
+ before do
+ allow_next_instance_of(::Gitlab::Metrics::Dashboard::Processor) do |processor|
+ allow(processor).to receive(:process).and_return(dashboard)
+ end
+ end
+
+ it 'returns success service response' do
+ expect(service_response.success?).to be_truthy
+ end
+
+ it 'returns processed panel' do
+ expect(service_response.payload).to eq('title' => 'test panel')
+ end
+
+ it 'uses dashboard processor' do
+ sequence = [
+ ::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
+ ::Gitlab::Metrics::Dashboard::Stages::UrlValidator
+ ]
+ processor_params = [project, dashboard, sequence, environment: environment]
+
+ expect_next_instance_of(::Gitlab::Metrics::Dashboard::Processor, *processor_params) do |processor|
+ expect(processor).to receive(:process).and_return(dashboard)
+ end
+
+ service_response
+ end
+ end
+
+ context "invalid panel's yaml" do
+ [
+ Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError,
+ Gitlab::Config::Loader::Yaml::NotHashError,
+ Gitlab::Config::Loader::Yaml::DataTooLargeError,
+ Gitlab::Config::Loader::FormatError
+ ].each do |error_class|
+ before do
+ allow_next_instance_of(::Gitlab::Metrics::Dashboard::Processor) do |processor|
+ allow(processor).to receive(:process).and_raise(error_class.new('error'))
+ end
+ end
+
+ it 'returns error service response' do
+ expect(service_response.error?).to be_truthy
+ end
+
+ it 'returns error message' do
+ expect(service_response.message).to eq('error')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
index ae0e38a04b2..0ea812e93ee 100644
--- a/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/pod_dashboard_service_spec.rb
@@ -54,4 +54,20 @@ RSpec.describe Metrics::Dashboard::PodDashboardService, :use_clean_rails_memory_
let(:dashboard_version) { subject.send(:dashboard_version) }
end
end
+
+ describe '.all_dashboard_paths' do
+ it 'returns the dashboard attributes' do
+ all_dashboards = described_class.all_dashboard_paths(project)
+
+ expect(all_dashboards).to eq(
+ [{
+ path: described_class::DASHBOARD_PATH,
+ display_name: described_class::DASHBOARD_NAME,
+ default: false,
+ system_dashboard: false,
+ out_of_the_box_dashboard: true
+ }]
+ )
+ end
+ end
end
diff --git a/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb b/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb
index aea3e41a013..33b7e3c85cd 100644
--- a/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb
+++ b/spec/services/metrics/dashboard/self_monitoring_dashboard_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Metrics::Dashboard::SelfMonitoringDashboardService, :use_clean_ra
path: described_class::DASHBOARD_PATH,
display_name: described_class::DASHBOARD_NAME,
default: true,
- system_dashboard: false,
+ system_dashboard: true,
out_of_the_box_dashboard: true
}]
)
diff --git a/spec/services/notes/copy_service_spec.rb b/spec/services/notes/copy_service_spec.rb
new file mode 100644
index 00000000000..fd44aa7cf40
--- /dev/null
+++ b/spec/services/notes/copy_service_spec.rb
@@ -0,0 +1,157 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Notes::CopyService do
+ describe '#initialize' do
+ let_it_be(:noteable) { create(:issue) }
+
+ it 'validates that we cannot copy notes to the same Noteable' do
+ expect { described_class.new(noteable, noteable) }.to raise_error(ArgumentError)
+ end
+ end
+
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:from_project) { create(:project, :public, group: group) }
+ let_it_be(:to_project) { create(:project, :public, group: group) }
+ let(:from_noteable) { create(:issue, project: from_project) }
+ let(:to_noteable) { create(:issue, project: to_project) }
+
+ subject(:execute_service) { described_class.new(user, from_noteable, to_noteable).execute }
+
+ context 'rewriting the note body' do
+ context 'simple notes' do
+ let!(:notes) do
+ [
+ create(:note, noteable: from_noteable, project: from_noteable.project,
+ created_at: 2.weeks.ago, updated_at: 1.week.ago),
+ create(:note, noteable: from_noteable, project: from_noteable.project),
+ create(:note, system: true, noteable: from_noteable, project: from_noteable.project)
+ ]
+ end
+
+ it 'rewrites existing notes in valid order' do
+ execute_service
+
+ expect(to_noteable.notes.order('id ASC').pluck(:note).first(3)).to eq(notes.map(&:note))
+ end
+
+ it 'copies all the issue notes' do
+ execute_service
+
+ expect(to_noteable.notes.count).to eq(3)
+ end
+
+ it 'does not change the note attributes' do
+ execute_service
+
+ new_note = to_noteable.notes.first
+
+ expect(new_note).to have_attributes(
+ note: notes.first.note,
+ author: notes.first.author
+ )
+ end
+
+ it 'copies the award emojis' do
+ create(:award_emoji, awardable: notes.first, name: 'thumbsup')
+
+ execute_service
+
+ new_award_emoji = to_noteable.notes.first.award_emoji.first
+
+ expect(new_award_emoji.name).to eq('thumbsup')
+ end
+
+ it 'copies system_note_metadata for system note' do
+ system_note_metadata = create(:system_note_metadata, note: notes.last)
+
+ execute_service
+
+ new_note = to_noteable.notes.last
+
+ aggregate_failures do
+ expect(new_note.system_note_metadata.action).to eq(system_note_metadata.action)
+ expect(new_note.system_note_metadata.id).not_to eq(system_note_metadata.id)
+ end
+ end
+
+ it 'returns success' do
+ aggregate_failures do
+ expect(execute_service).to be_kind_of(ServiceResponse)
+ expect(execute_service).to be_success
+ end
+ end
+ end
+
+ context 'notes with mentions' do
+ let!(:note_with_mention) { create(:note, noteable: from_noteable, author: from_noteable.author, project: from_noteable.project, note: "note with mention #{user.to_reference}") }
+ let!(:note_with_no_mention) { create(:note, noteable: from_noteable, author: from_noteable.author, project: from_noteable.project, note: "note without mention") }
+
+ it 'saves user mentions with actual mentions for new issue' do
+ execute_service
+
+ aggregate_failures do
+ expect(to_noteable.user_mentions.first.mentioned_users_ids).to match_array([user.id])
+ expect(to_noteable.user_mentions.count).to eq(1)
+ end
+ end
+ end
+
+ context 'notes with reference' do
+ let(:other_issue) { create(:issue, project: from_noteable.project) }
+ let(:merge_request) { create(:merge_request) }
+ let(:text) { "See ##{other_issue.iid} and #{merge_request.project.full_path}!#{merge_request.iid}" }
+ let!(:note) { create(:note, noteable: from_noteable, note: text, project: from_noteable.project) }
+
+ it 'rewrites the references correctly' do
+ execute_service
+
+ new_note = to_noteable.notes.first
+
+ expected_text = "See #{other_issue.project.path}##{other_issue.iid} and #{merge_request.project.full_path}!#{merge_request.iid}"
+
+ aggregate_failures do
+ expect(new_note.note).to eq(expected_text)
+ expect(new_note.author).to eq(note.author)
+ end
+ end
+ end
+
+ context 'notes with upload' do
+ let(:uploader) { build(:file_uploader, project: from_noteable.project) }
+ let(:text) { "Simple text with image: #{uploader.markdown_link} "}
+ let!(:note) { create(:note, noteable: from_noteable, note: text, project: from_noteable.project) }
+
+ it 'rewrites note content correctly' do
+ execute_service
+ new_note = to_noteable.notes.first
+
+ aggregate_failures do
+ expect(note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/)
+ expect(new_note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/)
+ expect(note.note).not_to eq(new_note.note)
+ expect(note.note_html).not_to eq(new_note.note_html)
+ end
+ end
+ end
+
+ context 'discussion notes' do
+ let(:note) { create(:note, noteable: from_noteable, note: 'sample note', project: from_noteable.project) }
+ let!(:discussion) { create(:discussion_note_on_issue, in_reply_to: note, note: 'reply to sample note') }
+
+ it 'rewrites discussion correctly' do
+ execute_service
+
+ aggregate_failures do
+ expect(to_noteable.notes.count).to eq(from_noteable.notes.count)
+ expect(to_noteable.notes.where(discussion_id: discussion.discussion_id).count).to eq(0)
+ expect(from_noteable.notes.where(discussion_id: discussion.discussion_id).count).to eq(1)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index fd824621db7..f087f72ca46 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -117,6 +117,7 @@ RSpec.describe Notes::CreateService do
source_project: project_with_repo,
target_project: project_with_repo)
end
+
let(:line_number) { 14 }
let(:position) do
Gitlab::Diff::Position.new(old_path: "files/ruby/popen.rb",
@@ -125,6 +126,7 @@ RSpec.describe Notes::CreateService do
new_line: line_number,
diff_refs: merge_request.diff_refs)
end
+
let(:previous_note) do
create(:diff_note_on_merge_request, noteable: merge_request, project: project_with_repo)
end
@@ -432,6 +434,13 @@ RSpec.describe Notes::CreateService do
.and change { existing_note.updated_at }
end
+ it 'returns a DiscussionNote with its parent discussion refreshed correctly' do
+ discussion_notes = subject.discussion.notes
+
+ expect(discussion_notes.size).to eq(2)
+ expect(discussion_notes.first).to be_a(DiscussionNote)
+ end
+
context 'discussion to reply cannot be found' do
before do
existing_note.delete
diff --git a/spec/services/notes/quick_actions_service_spec.rb b/spec/services/notes/quick_actions_service_spec.rb
index d20824efaaa..e9decd44730 100644
--- a/spec/services/notes/quick_actions_service_spec.rb
+++ b/spec/services/notes/quick_actions_service_spec.rb
@@ -58,14 +58,29 @@ RSpec.describe Notes::QuickActionsService do
end
describe '/spend' do
- let(:note_text) { '/spend 1h' }
+ context 'when note is not persisted' do
+ let(:note_text) { '/spend 1h' }
- it 'updates the spent time on the noteable' do
- content, update_params = service.execute(note)
- service.apply_updates(update_params, note)
+ it 'adds time to the noteable, adds a timelog with nil note_id, and returns no content' do
+ content, update_params = service.execute(note)
+ service.apply_updates(update_params, note)
- expect(content).to eq ''
- expect(note.noteable.time_spent).to eq(3600)
+ expect(content).to eq ''
+ expect(note.noteable.time_spent).to eq(3600)
+ expect(Timelog.last.note_id).to be_nil
+ end
+ end
+
+ context 'when note is persisted' do
+ let(:note_text) { "a note \n/spend 1h" }
+
+ it 'updates the spent time and populates timelog with note_id' do
+ new_content, update_params = service.execute(note)
+ note.update!(note: new_content)
+ service.apply_updates(update_params, note)
+
+ expect(Timelog.last.note_id).to eq(note.id)
+ end
end
end
end
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index 70dea99de4a..47b8ba0cd72 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -13,6 +13,17 @@ RSpec.describe Notes::UpdateService do
let(:issue) { create(:issue, project: project) }
let(:issue2) { create(:issue, project: private_project) }
let(:note) { create(:note, project: project, noteable: issue, author: user, note: "Old note #{user2.to_reference}") }
+ let(:markdown) do
+ <<-MARKDOWN.strip_heredoc
+ ```suggestion
+ foo
+ ```
+
+ ```suggestion
+ bar
+ ```
+ MARKDOWN
+ end
before do
project.add_maintainer(user)
@@ -36,18 +47,18 @@ RSpec.describe Notes::UpdateService do
end
end
- context 'suggestions' do
- it 'refreshes note suggestions' do
- markdown = <<-MARKDOWN.strip_heredoc
- ```suggestion
- foo
- ```
+ context 'with system note' do
+ before do
+ note.update_column(:system, true)
+ end
- ```suggestion
- bar
- ```
- MARKDOWN
+ it 'does not update the note' do
+ expect { update_note(note: 'new text') }.not_to change { note.reload.note }
+ end
+ end
+ context 'suggestions' do
+ it 'refreshes note suggestions' do
suggestion = create(:suggestion)
note = suggestion.note
@@ -191,5 +202,24 @@ RSpec.describe Notes::UpdateService do
end
end
end
+
+ context 'for a personal snippet' do
+ let_it_be(:snippet) { create(:personal_snippet, :public) }
+ let(:note) { create(:note, project: nil, noteable: snippet, author: user, note: "Note on a snippet with reference #{issue.to_reference}") }
+
+ it 'does not create todos' do
+ expect { update_note({ note: "Mentioning user #{user2}" }) }.not_to change { note.todos.count }
+ end
+
+ it 'does not create suggestions' do
+ expect { update_note({ note: "Updated snippet with markdown suggestion #{markdown}" }) }
+ .not_to change { note.suggestions.count }
+ end
+
+ it 'does not create mentions' do
+ expect(note).not_to receive(:create_new_cross_references!)
+ update_note({ note: "Updated with new reference: #{issue.to_reference}" })
+ end
+ end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 2fe7a46de4b..8186bc40bc0 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -238,6 +238,26 @@ RSpec.describe NotificationService, :mailer do
expect { subject }.to have_enqueued_email(user, mail: "access_token_about_to_expire_email")
end
end
+
+ describe '#access_token_expired' do
+ let_it_be(:user) { create(:user) }
+
+ subject { notification.access_token_expired(user) }
+
+ it 'sends email to the token owner' do
+ expect { subject }.to have_enqueued_email(user, mail: "access_token_expired_email")
+ end
+
+ context 'when user is not allowed to receive notifications' do
+ before do
+ user.block!
+ end
+
+ it 'does not send email to the token owner' do
+ expect { subject }.not_to have_enqueued_email(user, mail: "access_token_expired_email")
+ end
+ end
+ end
end
describe '#unknown_sign_in' do
@@ -2054,16 +2074,66 @@ RSpec.describe NotificationService, :mailer do
end
describe '#project_was_moved' do
- it 'notifies the expected users' do
- notification.project_was_moved(project, "gitlab/gitlab")
+ context 'when notifications are disabled' do
+ before do
+ @u_custom_global.global_notification_setting.update!(moved_project: false)
+ end
- should_email(@u_watcher)
- should_email(@u_participating)
- should_email(@u_lazy_participant)
- should_email(@u_custom_global)
- should_not_email(@u_guest_watcher)
- should_not_email(@u_guest_custom)
- should_not_email(@u_disabled)
+ it 'does not send a notification' do
+ notification.project_was_moved(project, "gitlab/gitlab")
+
+ should_not_email(@u_custom_global)
+ end
+ end
+
+ context 'with users at both project and group level' do
+ let(:maintainer) { create(:user) }
+ let(:developer) { create(:user) }
+ let(:group_owner) { create(:user) }
+ let(:group_maintainer) { create(:user) }
+ let(:group_developer) { create(:user) }
+ let(:blocked_user) { create(:user, :blocked) }
+ let(:invited_user) { create(:user) }
+
+ let!(:group) do
+ create(:group, :public) do |group|
+ project.group = group
+ project.save!
+
+ group.add_owner(group_owner)
+ group.add_maintainer(group_maintainer)
+ group.add_developer(group_developer)
+ # This is to check for dupes
+ group.add_maintainer(maintainer)
+ group.add_maintainer(blocked_user)
+ end
+ end
+
+ before do
+ project.add_maintainer(maintainer)
+ project.add_developer(developer)
+ project.add_maintainer(blocked_user)
+ reset_delivered_emails!
+ end
+
+ it 'notifies the expected users' do
+ notification.project_was_moved(project, "gitlab/gitlab")
+
+ should_email(@u_watcher)
+ should_email(@u_participating)
+ should_email(@u_lazy_participant)
+ should_email(@u_custom_global)
+ should_not_email(@u_guest_watcher)
+ should_not_email(@u_guest_custom)
+ should_not_email(@u_disabled)
+
+ should_email(maintainer)
+ should_email(group_owner)
+ should_email(group_maintainer)
+ should_not_email(group_developer)
+ should_not_email(developer)
+ should_not_email(blocked_user)
+ end
end
it_behaves_like 'project emails are disabled' do
diff --git a/spec/services/packages/create_dependency_service_spec.rb b/spec/services/packages/create_dependency_service_spec.rb
index 00e5e5c6d96..3ad59b31b2c 100644
--- a/spec/services/packages/create_dependency_service_spec.rb
+++ b/spec/services/packages/create_dependency_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Packages::CreateDependencyService do
.gsub('1.0.1', version))
.with_indifferent_access
end
+
let(:package_version) { params[:versions].each_key.first }
let(:dependencies) { params[:versions][package_version] }
let(:package) { create(:npm_package) }
diff --git a/spec/services/packages/maven/find_or_create_package_service_spec.rb b/spec/services/packages/maven/find_or_create_package_service_spec.rb
index c9441324216..4406e4037e2 100644
--- a/spec/services/packages/maven/find_or_create_package_service_spec.rb
+++ b/spec/services/packages/maven/find_or_create_package_service_spec.rb
@@ -4,34 +4,77 @@ require 'spec_helper'
RSpec.describe Packages::Maven::FindOrCreatePackageService do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
- let_it_be(:app_name) { 'my-app' }
- let_it_be(:version) { '1.0-SNAPSHOT' }
- let_it_be(:path) { "my/company/app/#{app_name}" }
- let_it_be(:path_with_version) { "#{path}/#{version}" }
- let_it_be(:params) do
- {
- path: path_with_version,
- name: path,
- version: version
- }
- end
+
+ let(:app_name) { 'my-app' }
+ let(:path) { "sandbox/test/app/#{app_name}" }
+ let(:version) { '1.0.0' }
+ let(:file_name) { 'test.jar' }
+ let(:param_path) { "#{path}/#{version}" }
describe '#execute' do
- subject { described_class.new(project, user, params).execute }
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.new(project, user, { path: param_path, file_name: file_name }).execute }
+
+ RSpec.shared_examples 'reuse existing package' do
+ it { expect { subject }.not_to change { Packages::Package.count } }
+
+ it { is_expected.to eq(existing_package) }
+ end
+
+ RSpec.shared_examples 'create package' do
+ it { expect { subject }.to change { Packages::Package.count }.by(1) }
+
+ it 'sets the proper name and version' do
+ pkg = subject
+
+ expect(pkg.name).to eq(path)
+ expect(pkg.version).to eq(version)
+ end
+ end
- context 'without any existing package' do
- it 'creates a package' do
- expect { subject }.to change { Packages::Package.count }.by(1)
+ context 'path with version' do
+ # Note that "path with version" and "file type maven metadata xml" only exists for snapshot versions
+ # In other words, we will never have an metadata xml upload on a path with version for a non snapshot version
+ where(:package_exist, :file_type, :snapshot_version, :shared_example_name) do
+ true | :jar | false | 'reuse existing package'
+ false | :jar | false | 'create package'
+ true | :jar | true | 'reuse existing package'
+ false | :jar | true | 'create package'
+ true | :maven_xml | true | 'reuse existing package'
+ false | :maven_xml | true | 'create package'
+ end
+
+ with_them do
+ let(:version) { snapshot_version ? '1.0-SNAPSHOT' : '1.0.0' }
+ let(:file_name) { file_type == :maven_xml ? 'maven-metadata.xml' : 'test.jar' }
+
+ let!(:existing_package) do
+ if package_exist
+ create(:maven_package, name: path, version: version, project: project)
+ end
+ end
+
+ it_behaves_like params[:shared_example_name]
end
end
- context 'with an existing package' do
- let_it_be(:existing_package) { create(:maven_package, name: path, version: version, project: project) }
+ context 'path without version' do
+ let(:param_path) { path }
+ let(:version) { nil }
+
+ context 'maven-metadata.xml file' do
+ let(:file_name) { 'maven-metadata.xml' }
+
+ context 'with existing package' do
+ let!(:existing_package) { create(:maven_package, name: path, version: version, project: project) }
+
+ it_behaves_like 'reuse existing package'
+ end
- it { is_expected.to eq existing_package }
- it "doesn't create a new package" do
- expect { subject }
- .to not_change { Packages::Package.count }
+ context 'without existing package' do
+ it_behaves_like 'create package'
+ end
end
end
end
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index 25bbbf82bec..c1391746f52 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
.gsub('1.0.1', version)).with_indifferent_access
.merge!(override)
end
+
let(:override) { {} }
let(:package_name) { "@#{namespace.path}/my-app".freeze }
diff --git a/spec/services/packages/pypi/create_package_service_spec.rb b/spec/services/packages/pypi/create_package_service_spec.rb
index 250b43d1f75..bfecb32f9ef 100644
--- a/spec/services/packages/pypi/create_package_service_spec.rb
+++ b/spec/services/packages/pypi/create_package_service_spec.rb
@@ -49,18 +49,11 @@ RSpec.describe Packages::Pypi::CreatePackageService do
params[:md5_digest] = 'def'
end
- it 'replaces the file' do
+ it 'raises an error' do
expect { subject }
.to change { Packages::Package.pypi.count }.by(0)
- .and change { Packages::PackageFile.count }.by(1)
-
- expect(created_package.package_files.size).to eq 2
- expect(created_package.package_files.first.file_name).to eq 'foo.tgz'
- expect(created_package.package_files.first.file_sha256).to eq '123'
- expect(created_package.package_files.first.file_md5).to eq '567'
- expect(created_package.package_files.last.file_name).to eq 'foo.tgz'
- expect(created_package.package_files.last.file_sha256).to eq 'abc'
- expect(created_package.package_files.last.file_md5).to eq 'def'
+ .and change { Packages::PackageFile.count }.by(0)
+ .and raise_error(/File name has already been taken/)
end
end
diff --git a/spec/services/personal_access_tokens/revoke_service_spec.rb b/spec/services/personal_access_tokens/revoke_service_spec.rb
new file mode 100644
index 00000000000..5afa43cef76
--- /dev/null
+++ b/spec/services/personal_access_tokens/revoke_service_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PersonalAccessTokens::RevokeService do
+ shared_examples_for 'a successfully revoked token' do
+ it { expect(subject.success?).to be true }
+ it { expect(service.token.revoked?).to be true }
+ end
+
+ shared_examples_for 'an unsuccessfully revoked token' do
+ it { expect(subject.success?).to be false }
+ it { expect(service.token.revoked?).to be false }
+ end
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let(:service) { described_class.new(current_user, token: token) }
+
+ context 'when current_user is an administrator' do
+ let_it_be(:current_user) { create(:admin) }
+ let_it_be(:token) { create(:personal_access_token) }
+
+ it_behaves_like 'a successfully revoked token'
+ end
+
+ context 'when current_user is not an administrator' do
+ let_it_be(:current_user) { create(:user) }
+
+ context 'token belongs to a different user' do
+ let_it_be(:token) { create(:personal_access_token) }
+
+ it_behaves_like 'an unsuccessfully revoked token'
+ end
+
+ context 'token belongs to current_user' do
+ let_it_be(:token) { create(:personal_access_token, user: current_user) }
+
+ it_behaves_like 'a successfully revoked token'
+ end
+ end
+ end
+end
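The new spec pins down the authorization rule for PersonalAccessTokens::RevokeService: an administrator may revoke any token, a regular user only their own. A minimal sketch of an #execute satisfying those expectations (assumed shape, not the shipped service; ServiceResponse is GitLab's standard success/error wrapper):

# Hypothetical sketch -- mirrors only what the spec above asserts.
class RevokeServiceSketch
  attr_reader :token

  def initialize(current_user, token:)
    @current_user = current_user
    @token = token
  end

  def execute
    return ServiceResponse.error(message: 'Not permitted to revoke') unless revocable?

    token.revoke! ? ServiceResponse.success : ServiceResponse.error(message: 'Could not revoke token')
  end

  private

  def revocable?
    @current_user.admin? || token.user == @current_user
  end
end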
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index 9431e47c6f2..e233abcd96a 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -243,6 +243,7 @@ RSpec.describe ::PodLogs::ElasticsearchService do
cursor: cursor
}
end
+
let(:expected_cursor) { '9999934,1572449784442' }
before do
diff --git a/spec/services/preview_markdown_service_spec.rb b/spec/services/preview_markdown_service_spec.rb
index 2509d1300b3..53f8f5b7253 100644
--- a/spec/services/preview_markdown_service_spec.rb
+++ b/spec/services/preview_markdown_service_spec.rb
@@ -25,12 +25,14 @@ RSpec.describe PreviewMarkdownService do
let(:merge_request) do
create(:merge_request, target_project: project, source_project: project)
end
+
let(:text) { "```suggestion\nfoo\n```" }
let(:params) do
suggestion_params.merge(text: text,
target_type: 'MergeRequest',
target_id: merge_request.iid)
end
+
let(:service) { described_class.new(project, user, params) }
context 'when preview markdown param is present' do
@@ -106,6 +108,7 @@ RSpec.describe PreviewMarkdownService do
target_id: issue.id
}
end
+
let(:service) { described_class.new(project, user, params) }
it 'removes quick actions from text' do
@@ -128,6 +131,7 @@ RSpec.describe PreviewMarkdownService do
target_type: 'MergeRequest'
}
end
+
let(:service) { described_class.new(project, user, params) }
it 'removes quick actions from text' do
@@ -153,6 +157,7 @@ RSpec.describe PreviewMarkdownService do
target_id: commit.id
}
end
+
let(:service) { described_class.new(project, user, params) }
it 'removes quick actions from text' do
diff --git a/spec/services/product_analytics/build_graph_service_spec.rb b/spec/services/product_analytics/build_graph_service_spec.rb
new file mode 100644
index 00000000000..933a2bfee92
--- /dev/null
+++ b/spec/services/product_analytics/build_graph_service_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProductAnalytics::BuildGraphService do
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:events) do
+ [
+ create(:product_analytics_event, project: project, platform: 'web'),
+ create(:product_analytics_event, project: project, platform: 'web'),
+ create(:product_analytics_event, project: project, platform: 'app'),
+ create(:product_analytics_event, project: project, platform: 'mobile'),
+ create(:product_analytics_event, project: project, platform: 'mobile', collector_tstamp: Time.zone.now - 60.days)
+ ]
+ end
+
+ let(:params) { { graph: 'platform', timerange: 5 } }
+
+ subject { described_class.new(project, params).execute }
+
+ it 'returns a valid graph hash' do
+ expect(subject[:id]).to eq(:platform)
+ expect(subject[:keys]).to eq(%w(app mobile web))
+ expect(subject[:values]).to eq([1, 1, 2])
+ end
+end
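The graph asserted by the new BuildGraphService spec is keyed alphabetically with per-platform counts, and events older than the timerange (the 60-day-old mobile event) are excluded. A rough sketch of that aggregation, written from the spec's expectations (the product_analytics_events association name and the timestamp filter are assumptions):

# Hypothetical sketch of the aggregation the spec asserts.
def build_graph_sketch(project, graph:, timerange:)
  counts = project.product_analytics_events
    .where('collector_tstamp > ?', timerange.days.ago)
    .group(graph)
    .count                # => { 'web' => 2, 'app' => 1, 'mobile' => 1 }
    .sort_by(&:first)     # keys come back alphabetically, as the spec expects

  { id: graph.to_sym, keys: counts.map(&:first), values: counts.map(&:last) }
end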
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index 123b0bad2a8..3e74a15c3c0 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -30,21 +30,6 @@ RSpec.describe Projects::Alerting::NotifyService do
end
end
- shared_examples 'sends notification email' do
- let(:notification_service) { spy }
-
- it 'sends a notification for firing alerts only' do
- expect(NotificationService)
- .to receive(:new)
- .and_return(notification_service)
-
- expect(notification_service)
- .to receive_message_chain(:async, :prometheus_alerts_fired)
-
- expect(subject).to be_success
- end
- end
-
shared_examples 'does not process incident issues' do
it 'does not process issues' do
expect(IncidentManagement::ProcessAlertWorker)
@@ -81,6 +66,7 @@ RSpec.describe Projects::Alerting::NotifyService do
fingerprint: fingerprint
}.with_indifferent_access
end
+
let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
subject { service.execute(token) }
@@ -234,7 +220,7 @@ RSpec.describe Projects::Alerting::NotifyService do
context 'with emails turned on' do
let(:email_enabled) { true }
- it_behaves_like 'sends notification email'
+ it_behaves_like 'Alert Notification Service sends notification email'
end
end
diff --git a/spec/services/projects/cleanup_service_spec.rb b/spec/services/projects/cleanup_service_spec.rb
index 528f31456a9..7c28b729e84 100644
--- a/spec/services/projects/cleanup_service_spec.rb
+++ b/spec/services/projects/cleanup_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Projects::CleanupService do
it 'runs garbage collection on the repository' do
expect_next_instance_of(GitGarbageCollectWorker) do |worker|
- expect(worker).to receive(:perform)
+ expect(worker).to receive(:perform).with(project.id, :gc, "project_cleanup:gc:#{project.id}")
end
service.execute
diff --git a/spec/services/projects/container_repository/delete_tags_service_spec.rb b/spec/services/projects/container_repository/delete_tags_service_spec.rb
index 3d065deefdf..3014ccbd7ba 100644
--- a/spec/services/projects/container_repository/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/delete_tags_service_spec.rb
@@ -3,21 +3,15 @@
require 'spec_helper'
RSpec.describe Projects::ContainerRepository::DeleteTagsService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:repository) { create(:container_repository, :root, project: project) }
+ include_context 'container repository delete tags service shared context'
- let(:params) { { tags: tags } }
let(:service) { described_class.new(project, user, params) }
- before do
- stub_container_registry_config(enabled: true,
- api_url: 'http://registry.gitlab',
- host_port: 'registry.gitlab')
-
- stub_container_registry_tags(
- repository: repository.path,
- tags: %w(latest A Ba Bb C D E))
+ let_it_be(:available_service_classes) do
+ [
+ ::Projects::ContainerRepository::Gitlab::DeleteTagsService,
+ ::Projects::ContainerRepository::ThirdParty::DeleteTagsService
+ ]
end
RSpec.shared_examples 'logging a success response' do
@@ -45,8 +39,54 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
end
end
+ RSpec.shared_examples 'calling the correct delete tags service' do |expected_service_class|
+ let(:service_response) { { status: :success, deleted: tags } }
+ let(:excluded_service_class) { available_service_classes.excluding(expected_service_class).first }
+
+ before do
+ service_double = double
+ expect(expected_service_class).to receive(:new).with(repository, tags).and_return(service_double)
+ expect(excluded_service_class).not_to receive(:new)
+ expect(service_double).to receive(:execute).and_return(service_response)
+ end
+
+ it { is_expected.to include(status: :success) }
+
+ it_behaves_like 'logging a success response'
+
+ context 'with an error service response' do
+ let(:service_response) { { status: :error, message: 'could not delete tags' } }
+
+ it { is_expected.to include(status: :error) }
+
+ it_behaves_like 'logging an error response'
+ end
+ end
+
+ RSpec.shared_examples 'handling invalid params' do
+ context 'with invalid params' do
+ before do
+ expect(::Projects::ContainerRepository::Gitlab::DeleteTagsService).not_to receive(:new)
+ expect(::Projects::ContainerRepository::ThirdParty::DeleteTagsService).not_to receive(:new)
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
+ end
+
+ context 'when no params are specified' do
+ let_it_be(:params) { {} }
+
+ it { is_expected.to include(status: :error) }
+ end
+
+ context 'with empty tags' do
+ let_it_be(:tags) { [] }
+
+ it { is_expected.to include(status: :error) }
+ end
+ end
+ end
+
describe '#execute' do
- let(:tags) { %w[A] }
+ let(:tags) { %w[A Ba] }
subject { service.execute(repository) }
@@ -61,247 +101,58 @@ RSpec.describe Projects::ContainerRepository::DeleteTagsService do
context 'when the registry supports fast delete' do
context 'and the feature is enabled' do
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:repository) { create(:container_repository, :root, project: project) }
-
before do
allow(repository.client).to receive(:supports_tag_delete?).and_return(true)
end
- context 'with tags to delete' do
- let_it_be(:tags) { %w[A Ba] }
-
- it 'deletes the tags by name' do
- stub_delete_reference_request('A')
- stub_delete_reference_request('Ba')
-
- expect_delete_tag_by_name('A')
- expect_delete_tag_by_name('Ba')
-
- is_expected.to include(status: :success)
- end
-
- it 'succeeds when tag delete returns 404' do
- stub_delete_reference_request('A')
- stub_delete_reference_request('Ba', 404)
-
- is_expected.to include(status: :success)
- end
-
- it_behaves_like 'logging a success response' do
- before do
- stub_delete_reference_request('A')
- stub_delete_reference_request('Ba')
- end
- end
-
- context 'with failures' do
- context 'when the delete request fails' do
- before do
- stub_delete_reference_request('A', 500)
- stub_delete_reference_request('Ba', 500)
- end
-
- it { is_expected.to include(status: :error) }
-
- it_behaves_like 'logging an error response'
- end
- end
- end
-
- context 'when no params are specified' do
- let_it_be(:params) { {} }
+ it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::Gitlab::DeleteTagsService
- it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
+ it_behaves_like 'handling invalid params'
- is_expected.to include(status: :error)
+ context 'with the real service' do
+ before do
+ stub_delete_reference_requests(tags)
+ expect_delete_tag_by_names(tags)
end
- end
- context 'with empty tags' do
- let_it_be(:tags) { [] }
+ it { is_expected.to include(status: :success) }
- it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
-
- is_expected.to include(status: :error)
- end
+ it_behaves_like 'logging a success response'
end
end
context 'and the feature is disabled' do
- let_it_be(:tags) { %w[A Ba] }
-
before do
stub_feature_flags(container_registry_fast_tag_delete: false)
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
- stub_put_manifest_request('A')
- stub_put_manifest_request('Ba')
end
- it 'fallbacks to slow delete' do
- expect(service).not_to receive(:fast_delete)
- expect(service).to receive(:slow_delete).with(repository, tags).and_call_original
-
- expect_delete_tag_by_digest('sha256:dummy')
+ it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::ThirdParty::DeleteTagsService
- subject
- end
+ it_behaves_like 'handling invalid params'
- it_behaves_like 'logging a success response' do
+ context 'with the real service' do
before do
- allow(service).to receive(:slow_delete).and_call_original
+ stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+ tags.each { |tag| stub_put_manifest_request(tag) }
expect_delete_tag_by_digest('sha256:dummy')
end
+
+ it { is_expected.to include(status: :success) }
+
+ it_behaves_like 'logging a success response'
end
end
end
context 'when the registry does not support fast delete' do
- let_it_be(:project) { create(:project, :private) }
- let_it_be(:repository) { create(:container_repository, :root, project: project) }
-
before do
- stub_tag_digest('latest', 'sha256:configA')
- stub_tag_digest('A', 'sha256:configA')
- stub_tag_digest('Ba', 'sha256:configB')
-
allow(repository.client).to receive(:supports_tag_delete?).and_return(false)
end
- context 'when no params are specified' do
- let_it_be(:params) { {} }
-
- it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
+ it_behaves_like 'calling the correct delete tags service', ::Projects::ContainerRepository::ThirdParty::DeleteTagsService
- is_expected.to include(status: :error)
- end
- end
-
- context 'with empty tags' do
- let_it_be(:tags) { [] }
-
- it 'does not remove anything' do
- expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_digest)
-
- is_expected.to include(status: :error)
- end
- end
-
- context 'with tags to delete' do
- let_it_be(:tags) { %w[A Ba] }
-
- it 'deletes the tags using a dummy image' do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
-
- stub_put_manifest_request('A')
- stub_put_manifest_request('Ba')
-
- expect_delete_tag_by_digest('sha256:dummy')
-
- is_expected.to include(status: :success)
- end
-
- it 'succeeds when tag delete returns 404' do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
-
- stub_put_manifest_request('A')
- stub_put_manifest_request('Ba')
-
- stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
- .to_return(status: 404, body: "", headers: {})
-
- is_expected.to include(status: :success)
- end
-
- it_behaves_like 'logging a success response' do
- before do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
- stub_put_manifest_request('A')
- stub_put_manifest_request('Ba')
- expect_delete_tag_by_digest('sha256:dummy')
- end
- end
-
- context 'with failures' do
- context 'when the dummy manifest generation fails' do
- before do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', success: false)
- end
-
- it { is_expected.to include(status: :error) }
-
- it_behaves_like 'logging an error response', message: 'could not generate manifest'
- end
-
- context 'when updating the tags fails' do
- before do
- stub_upload("{\n \"config\": {\n }\n}", 'sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
-
- stub_put_manifest_request('A', 500)
- stub_put_manifest_request('Ba', 500)
-
- stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3")
- .to_return(status: 200, body: "", headers: {})
- end
-
- it { is_expected.to include(status: :error) }
- it_behaves_like 'logging an error response'
- end
- end
- end
+ it_behaves_like 'handling invalid params'
end
end
end
-
- private
-
- def stub_delete_reference_request(tag, status = 200)
- stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/#{tag}")
- .to_return(status: status, body: '')
- end
-
- def stub_put_manifest_request(tag, status = 200, headers = { 'docker-content-digest' => 'sha256:dummy' })
- stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/#{tag}")
- .to_return(status: status, body: '', headers: headers)
- end
-
- def stub_tag_digest(tag, digest)
- stub_request(:head, "http://registry.gitlab/v2/#{repository.path}/manifests/#{tag}")
- .to_return(status: 200, body: "", headers: { 'docker-content-digest' => digest })
- end
-
- def stub_digest_config(digest, created_at)
- allow_any_instance_of(ContainerRegistry::Client)
- .to receive(:blob)
- .with(repository.path, digest, nil) do
- { 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
- end
- end
-
- def stub_upload(content, digest, success: true)
- expect_any_instance_of(ContainerRegistry::Client)
- .to receive(:upload_blob)
- .with(repository.path, content, digest) { double(success?: success ) }
- end
-
- def expect_delete_tag_by_digest(digest)
- expect_any_instance_of(ContainerRegistry::Client)
- .to receive(:delete_repository_tag_by_digest)
- .with(repository.path, digest) { true }
-
- expect_any_instance_of(ContainerRegistry::Client)
- .not_to receive(:delete_repository_tag_by_name)
- end
-
- def expect_delete_tag_by_name(name)
- expect_any_instance_of(ContainerRegistry::Client)
- .to receive(:delete_repository_tag_by_name)
- .with(repository.path, name) { true }
-
- expect_any_instance_of(ContainerRegistry::Client)
- .not_to receive(:delete_repository_tag_by_digest)
- end
end
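For context on the rework above: the shared examples assert that the umbrella service now only selects one of two implementations and delegates (repository, tags) to it -- the GitLab fast-delete path when the registry client supports tag deletes and container_registry_fast_tag_delete is enabled, the third-party dummy-manifest path otherwise, with blank tag params rejected up front. A minimal sketch of that dispatch, inferred from the spec rather than from the real service:

# Hypothetical sketch -- mirrors only what the shared examples check.
class DeleteTagsDispatchSketch
  def initialize(project, user, params = {})
    @project = project
    @user = user
    @tags = params[:tags]
  end

  def execute(repository)
    return { status: :error, message: 'no tags specified' } if @tags.blank?

    delete_service_class(repository).new(repository, @tags).execute
  end

  private

  def delete_service_class(repository)
    if Feature.enabled?(:container_registry_fast_tag_delete) && repository.client.supports_tag_delete?
      ::Projects::ContainerRepository::Gitlab::DeleteTagsService
    else
      ::Projects::ContainerRepository::ThirdParty::DeleteTagsService
    end
  end
end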
diff --git a/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
new file mode 100644
index 00000000000..68c232e5d83
--- /dev/null
+++ b/spec/services/projects/container_repository/gitlab/delete_tags_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ContainerRepository::Gitlab::DeleteTagsService do
+ include_context 'container repository delete tags service shared context'
+
+ let(:service) { described_class.new(repository, tags) }
+
+ describe '#execute' do
+ let(:tags) { %w[A Ba] }
+
+ subject { service.execute }
+
+ context 'with tags to delete' do
+ it 'deletes the tags by name' do
+ stub_delete_reference_requests(tags)
+ expect_delete_tag_by_names(tags)
+
+ is_expected.to eq(status: :success, deleted: tags)
+ end
+
+ it 'succeeds when tag delete returns 404' do
+ stub_delete_reference_requests('A' => 200, 'Ba' => 404)
+
+ is_expected.to eq(status: :success, deleted: tags)
+ end
+
+ it 'succeeds when a tag delete returns 500' do
+ stub_delete_reference_requests('A' => 200, 'Ba' => 500)
+
+ is_expected.to eq(status: :success, deleted: ['A'])
+ end
+
+ context 'with failures' do
+ context 'when the delete request fails' do
+ before do
+ stub_delete_reference_requests('A' => 500, 'Ba' => 500)
+ end
+
+ it { is_expected.to eq(status: :error, message: 'could not delete tags') }
+ end
+ end
+ end
+
+ context 'with empty tags' do
+ let_it_be(:tags) { [] }
+
+ it 'does not remove anything' do
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
+
+ is_expected.to eq(status: :success, deleted: [])
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb
new file mode 100644
index 00000000000..7fc963949eb
--- /dev/null
+++ b/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ContainerRepository::ThirdParty::DeleteTagsService do
+ include_context 'container repository delete tags service shared context'
+
+ let(:service) { described_class.new(repository, tags) }
+
+ describe '#execute' do
+ let(:tags) { %w[A Ba] }
+
+ subject { service.execute }
+
+ context 'with tags to delete' do
+ it 'deletes the tags by name' do
+ stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+
+ tags.each { |tag| stub_put_manifest_request(tag) }
+
+ expect_delete_tag_by_digest('sha256:dummy')
+
+ is_expected.to eq(status: :success, deleted: tags)
+ end
+
+ it 'succeeds when tag delete returns 404' do
+ stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+
+ stub_put_manifest_request('A')
+ stub_put_manifest_request('Ba')
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
+ .to_return(status: 404, body: '', headers: {})
+
+ is_expected.to eq(status: :success, deleted: tags)
+ end
+
+ context 'with failures' do
+ context 'when the dummy manifest generation fails' do
+ before do
+ stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3', success: false)
+ end
+
+ it { is_expected.to eq(status: :error, message: 'could not generate manifest') }
+ end
+
+ context 'when updating tags fails' do
+ before do
+ stub_upload('sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3')
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:4435000728ee66e6a80e55637fc22725c256b61de344a2ecdeaac6bdb36e8bc3")
+ .to_return(status: 200, body: '', headers: {})
+ end
+
+ context 'all tag updates fail' do
+ before do
+ stub_put_manifest_request('A', 500, {})
+ stub_put_manifest_request('Ba', 500, {})
+ end
+
+ it { is_expected.to eq(status: :error, message: 'could not delete tags') }
+ end
+
+ context 'a single tag update fails' do
+ before do
+ stub_put_manifest_request('A')
+ stub_put_manifest_request('Ba', 500, {})
+
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/manifests/sha256:dummy")
+ .to_return(status: 404, body: '', headers: {})
+ end
+
+ it { is_expected.to eq(status: :success, deleted: ['A']) }
+ end
+ end
+ end
+ end
+
+ context 'with empty tags' do
+ let_it_be(:tags) { [] }
+
+ it 'does not remove anything' do
+ expect_any_instance_of(ContainerRegistry::Client).not_to receive(:delete_repository_tag_by_name)
+
+ is_expected.to eq(status: :success, deleted: [])
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 9eb7cacbbcb..e1df8700795 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -48,6 +48,12 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(project.project_setting).to be_new_record
end
+
+ it_behaves_like 'storing arguments in the application context' do
+ let(:expected_params) { { project: subject.full_path, related_class: described_class.to_s } }
+
+ subject { create_project(user, opts) }
+ end
end
context "admin creates project with other user's namespace_id" do
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index c49aa42b147..925c2ff5d88 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -105,6 +105,7 @@ RSpec.describe Projects::ForkService do
group.add_owner(@to_user)
group
end
+
let(:to_project) { fork_project(from_forked_project, @to_user, namespace: other_namespace) }
it 'sets the root of the network to the root project' do
@@ -439,37 +440,71 @@ RSpec.describe Projects::ForkService do
end
describe '#valid_fork_target?' do
- subject { described_class.new(project, user, params).valid_fork_target? }
-
let(:project) { Project.new }
let(:params) { {} }
- context 'when current user is an admin' do
- let(:user) { build(:user, :admin) }
+ context 'when target is not passed' do
+ subject { described_class.new(project, user, params).valid_fork_target? }
- it { is_expected.to be_truthy }
- end
+ context 'when current user is an admin' do
+ let(:user) { build(:user, :admin) }
- context 'when current_user is not an admin' do
- let(:user) { create(:user) }
+ it { is_expected.to be_truthy }
+ end
- let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [user.namespace]) }
- let(:project) { create(:project) }
+ context 'when current_user is not an admin' do
+ let(:user) { create(:user) }
- before do
- allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
+ let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [user.namespace]) }
+ let(:project) { create(:project) }
+
+ before do
+ allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
+ end
+
+ context 'when target namespace is in valid fork targets' do
+ let(:params) { { namespace: user.namespace } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when target namespace is not in valid fork targets' do
+ let(:params) { { namespace: create(:group) } }
+
+ it { is_expected.to be_falsey }
+ end
end
+ end
+
+ context 'when target is passed' do
+ let(:target) { create(:group) }
- context 'when target namespace is in valid fork targets' do
- let(:params) { { namespace: user.namespace } }
+ subject { described_class.new(project, user, params).valid_fork_target?(target) }
+
+ context 'when current user is an admin' do
+ let(:user) { build(:user, :admin) }
it { is_expected.to be_truthy }
end
- context 'when target namespace is not in valid fork targets' do
- let(:params) { { namespace: create(:group) } }
+ context 'when current user is not an admin' do
+ let(:user) { create(:user) }
- it { is_expected.to be_falsey }
+ before do
+ allow(ForkTargetsFinder).to receive(:new).with(project, user).and_return(finder_mock)
+ end
+
+ context 'when target namespace is in valid fork targets' do
+ let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [target]) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when target namespace is not in valid fork targets' do
+ let(:finder_mock) { instance_double('ForkTargetsFinder', execute: [create(:group)]) }
+
+ it { is_expected.to be_falsey }
+ end
end
end
end
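The restructured contexts describe valid_fork_target? with an optional explicit target: admins can always fork, everyone else only into a namespace returned by ForkTargetsFinder, falling back to params[:namespace] when no target is passed. A one-method sketch of that predicate (an assumption that directly mirrors the spec's stubs):

# Hypothetical sketch of the predicate exercised above.
def valid_fork_target_sketch?(project, user, params, target = nil)
  return true if user.admin?

  namespace = target || params[:namespace]
  ForkTargetsFinder.new(project, user).execute.include?(namespace)
end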
diff --git a/spec/services/projects/operations/update_service_spec.rb b/spec/services/projects/operations/update_service_spec.rb
index 3cfc9844d65..8a538bc67ed 100644
--- a/spec/services/projects/operations/update_service_spec.rb
+++ b/spec/services/projects/operations/update_service_spec.rb
@@ -384,6 +384,7 @@ RSpec.describe Projects::Operations::UpdateService do
manual_configuration: "0"
})
end
+
let(:params) do
{
prometheus_integration_attributes: {
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index aae257e3e3a..efe8e8b9243 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -21,38 +21,6 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
project.clear_memoization(:licensed_feature_available)
end
- shared_examples 'sends notification email' do
- let(:notification_service) { spy }
-
- it 'sends a notification for firing alerts only' do
- expect(NotificationService)
- .to receive(:new)
- .and_return(notification_service)
-
- expect(notification_service)
- .to receive_message_chain(:async, :prometheus_alerts_fired)
-
- expect(subject).to be_success
- end
- end
-
- shared_examples 'notifies alerts' do
- it_behaves_like 'sends notification email'
- end
-
- shared_examples 'no notifications' do |http_status:|
- let(:notification_service) { spy }
- let(:create_events_service) { spy }
-
- it 'does not notify' do
- expect(notification_service).not_to receive(:async)
- expect(create_events_service).not_to receive(:execute)
-
- expect(subject).to be_error
- expect(subject.http_status).to eq(http_status)
- end
- end
-
context 'with valid payload' do
let_it_be(:alert_firing) { create(:prometheus_alert, project: project) }
let_it_be(:alert_resolved) { create(:prometheus_alert, project: project) }
@@ -89,11 +57,11 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
context 'without token' do
let(:token_input) { nil }
- it_behaves_like 'notifies alerts'
+ it_behaves_like 'Alert Notification Service sends notification email'
end
context 'with token' do
- it_behaves_like 'no notifications', http_status: :unauthorized
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
end
end
@@ -125,9 +93,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
case result = params[:result]
when :success
- it_behaves_like 'notifies alerts'
+ it_behaves_like 'Alert Notification Service sends notification email'
when :failure
- it_behaves_like 'no notifications', http_status: :unauthorized
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
else
raise "invalid result: #{result.inspect}"
end
@@ -137,7 +105,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
context 'without project specific cluster' do
let!(:cluster) { create(:cluster, enabled: true) }
- it_behaves_like 'no notifications', http_status: :unauthorized
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
end
context 'with manual prometheus installation' do
@@ -166,9 +134,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
case result = params[:result]
when :success
- it_behaves_like 'notifies alerts'
+ it_behaves_like 'Alert Notification Service sends notification email'
when :failure
- it_behaves_like 'no notifications', http_status: :unauthorized
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
else
raise "invalid result: #{result.inspect}"
end
@@ -199,9 +167,9 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
case result = params[:result]
when :success
- it_behaves_like 'notifies alerts'
+ it_behaves_like 'Alert Notification Service sends notification email'
when :failure
- it_behaves_like 'no notifications', http_status: :unauthorized
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unauthorized
else
raise "invalid result: #{result.inspect}"
end
@@ -226,7 +194,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'when incident_management_setting.send_email is true' do
- it_behaves_like 'notifies alerts'
+ it_behaves_like 'Alert Notification Service sends notification email'
end
context 'incident_management_setting.send_email is false' do
@@ -278,7 +246,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
.and_return(false)
end
- it_behaves_like 'no notifications', http_status: :unprocessable_entity
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :unprocessable_entity
end
context 'when the payload is too big' do
@@ -289,7 +257,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
end
- it_behaves_like 'no notifications', http_status: :bad_request
+ it_behaves_like 'Alert Notification Service sends no notifications', http_status: :bad_request
it 'does not process Prometheus alerts' do
expect(AlertManagement::ProcessPrometheusAlertService)
diff --git a/spec/services/projects/propagate_service_template_spec.rb b/spec/services/projects/propagate_service_template_spec.rb
index 266bf2cc213..df69e5a29fb 100644
--- a/spec/services/projects/propagate_service_template_spec.rb
+++ b/spec/services/projects/propagate_service_template_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Projects::PropagateServiceTemplate do
end
let!(:project) { create(:project) }
- let(:excluded_attributes) { %w[id project_id template created_at updated_at title description] }
+ let(:excluded_attributes) { %w[id project_id template created_at updated_at default] }
it 'creates services for projects' do
expect(project.pushover_service).to be_nil
@@ -120,7 +120,7 @@ RSpec.describe Projects::PropagateServiceTemplate do
describe 'external tracker' do
it 'updates the project external tracker' do
- service_template.update!(category: 'issue_tracker', default: false)
+ service_template.update!(category: 'issue_tracker')
expect { described_class.propagate(service_template) }
.to change { project.reload.has_external_issue_tracker }.to(true)
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 72426a6f6ec..3362b333c6e 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -11,6 +11,39 @@ RSpec.describe Projects::TransferService do
subject(:execute_transfer) { described_class.new(project, user).execute(group) }
+ context 'with npm packages' do
+ before do
+ group.add_owner(user)
+ end
+
+ subject(:transfer_service) { described_class.new(project, user) }
+
+ let!(:package) { create(:npm_package, project: project) }
+
+ context 'with a root namespace change' do
+ it 'does not allow the transfer' do
+ expect(transfer_service.execute(group)).to be false
+ expect(project.errors[:new_namespace]).to include("Root namespace can't be updated if project has NPM packages")
+ end
+ end
+
+ context 'without a root namespace change' do
+ let(:root) { create(:group) }
+ let(:group) { create(:group, parent: root) }
+ let(:other_group) { create(:group, parent: root) }
+ let(:project) { create(:project, :repository, namespace: group) }
+
+ before do
+ other_group.add_owner(user)
+ end
+
+ it 'does allow the transfer' do
+ expect(transfer_service.execute(other_group)).to be true
+ expect(project.errors[:new_namespace]).to be_empty
+ end
+ end
+ end
+
context 'namespace -> namespace' do
before do
allow_next_instance_of(Gitlab::UploadsTransfer) do |service|
diff --git a/spec/services/projects/update_pages_configuration_service_spec.rb b/spec/services/projects/update_pages_configuration_service_spec.rb
index c4c9fc779fa..9f7ebd40df6 100644
--- a/spec/services/projects/update_pages_configuration_service_spec.rb
+++ b/spec/services/projects/update_pages_configuration_service_spec.rb
@@ -3,41 +3,75 @@
require 'spec_helper'
RSpec.describe Projects::UpdatePagesConfigurationService do
- let(:project) { create(:project) }
let(:service) { described_class.new(project) }
- describe "#update" do
- let(:file) { Tempfile.new('pages-test') }
-
+ describe "#execute" do
subject { service.execute }
- after do
- file.close
- file.unlink
- end
+ context 'when pages are deployed' do
+ let_it_be(:project) do
+ create(:project).tap(&:mark_pages_as_deployed)
+ end
- before do
- allow(service).to receive(:pages_config_file).and_return(file.path)
- end
+ let(:file) { Tempfile.new('pages-test') }
+
+ before do
+ allow(service).to receive(:pages_config_file).and_return(file.path)
+ end
+
+ after do
+ file.close
+ file.unlink
+ end
+
+ context 'when configuration changes' do
+ it 'updates the config and reloads the daemon' do
+ allow(service).to receive(:update_file).and_call_original
- context 'when configuration changes' do
- it 'updates the .update file' do
- expect(service).to receive(:reload_daemon).and_call_original
+ expect(service).to receive(:update_file).with(file.path, an_instance_of(String))
+ .and_call_original
+ expect(service).to receive(:reload_daemon).and_call_original
- expect(subject).to include(status: :success, reload: true)
+ expect(subject).to include(status: :success)
+ end
+ end
+
+ context 'when configuration does not change' do
+ before do
+ # run the service once so the configuration file is already written
+ service.execute
+ end
+
+ it 'does not update the .update file' do
+ expect(service).not_to receive(:reload_daemon)
+
+ expect(subject).to include(status: :success)
+ end
+ end
+
+ context 'when an error occurs' do
+ it 'returns an error object' do
+ e = StandardError.new("Failure")
+ allow(service).to receive(:reload_daemon).and_raise(e)
+
+ expect(subject).to eq(status: :error, message: "Failure", exception: e)
+ end
end
end
- context 'when configuration does not change' do
- before do
- # we set the configuration
- service.execute
+ context 'when pages are not deployed' do
+ let_it_be(:project) do
+ create(:project).tap(&:mark_pages_as_not_deployed)
+ end
+
+ it 'returns successfully' do
+ expect(subject).to eq(status: :success)
end
- it 'does not update the .update file' do
- expect(service).not_to receive(:reload_daemon)
+ it 'does not update the config' do
+ expect(service).not_to receive(:update_file)
- expect(subject).to include(status: :success, reload: false)
+ subject
end
end
end
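Taken together, the new contexts describe the service's contract: return success without touching anything when pages are not deployed, rewrite the config file only when its content actually changed, reload the daemon only in that case, and convert exceptions into an error hash. A rough sketch of that control flow (pages_config_json and touch_update_file are assumed helpers standing in for the real serializer and reload trigger):

# Hypothetical sketch of the behaviour described by the spec above.
def update_pages_configuration_sketch(project, config_path)
  return { status: :success } unless project.pages_deployed?

  new_content = pages_config_json(project)
  if File.exist?(config_path) && File.read(config_path) == new_content
    return { status: :success }   # unchanged config: no daemon reload
  end

  File.write(config_path, new_content)
  touch_update_file(config_path)  # the "reload_daemon" step the spec stubs
  { status: :success }
rescue StandardError => e
  { status: :error, message: e.message, exception: e }
end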
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 2e02cb56668..374ce4f4ce2 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Projects::UpdatePagesService do
# Check that all expected files are extracted
%w[index.html zero .hidden/file].each do |filename|
- expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
+ expect(File.exist?(File.join(project.pages_path, 'public', filename))).to be_truthy
end
end
@@ -65,15 +65,17 @@ RSpec.describe Projects::UpdatePagesService do
it 'removes pages after destroy' do
expect(PagesWorker).to receive(:perform_in)
expect(project.pages_deployed?).to be_falsey
+ expect(Dir.exist?(File.join(project.pages_path))).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_metadatum).to be_deployed
expect(project.pages_deployed?).to be_truthy
+ expect(Dir.exist?(File.join(project.pages_path))).to be_truthy
project.destroy
- expect(project.pages_deployed?).to be_falsey
+ expect(Dir.exist?(File.join(project.pages_path))).to be_falsey
expect(ProjectPagesMetadatum.find_by_project_id(project)).to be_nil
end
@@ -160,19 +162,9 @@ RSpec.describe Projects::UpdatePagesService do
end
context 'with background jobs running', :sidekiq_inline do
- where(:ci_atomic_processing) do
- [true, false]
- end
-
- with_them do
- before do
- stub_feature_flags(ci_atomic_processing: ci_atomic_processing)
- end
-
- it 'succeeds' do
- expect(project.pages_deployed?).to be_falsey
- expect(execute).to eq(:success)
- end
+ it 'succeeds' do
+ expect(project.pages_deployed?).to be_falsey
+ expect(execute).to eq(:success)
end
end
end
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index f0a8074f46c..09244db8010 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -10,10 +10,6 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
subject(:service) { described_class.new(project, project.creator) }
- before do
- stub_feature_flags(gitaly_ruby_remote_branches_ls_remote: false)
- end
-
describe '#execute' do
subject(:execute!) { service.execute(remote_mirror, 0) }
@@ -26,17 +22,14 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
end
it 'ensures the remote exists' do
- stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
-
expect(remote_mirror).to receive(:ensure_remote!)
execute!
end
- it 'fetches the remote repository' do
- expect(project.repository)
- .to receive(:fetch_remote)
- .with(remote_mirror.remote_name, no_tags: true, ssh_auth: remote_mirror)
+ it 'does not fetch the remote repository' do
+ # See https://gitlab.com/gitlab-org/gitaly/-/issues/2670
+ expect(project.repository).not_to receive(:fetch_remote)
execute!
end
@@ -48,8 +41,6 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
end
it 'marks the mirror as successfully finished' do
- stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
-
result = execute!
expect(result[:status]).to eq(:success)
@@ -57,7 +48,7 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
end
it 'marks the mirror as failed and raises the error when an unexpected error occurs' do
- allow(project.repository).to receive(:fetch_remote).and_raise('Badly broken')
+ allow(remote_mirror).to receive(:update_repository).and_raise('Badly broken')
expect { execute! }.to raise_error(/Badly broken/)
@@ -67,33 +58,30 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
context 'when the update fails because of a `Gitlab::Git::CommandError`' do
before do
- allow(project.repository).to receive(:fetch_remote).and_raise(Gitlab::Git::CommandError.new('fetch failed'))
+ allow(remote_mirror).to receive(:update_repository)
+ .and_raise(Gitlab::Git::CommandError.new('update failed'))
end
it 'wraps `Gitlab::Git::CommandError`s in a service error' do
- expect(execute!).to eq(status: :error, message: 'fetch failed')
+ expect(execute!).to eq(status: :error, message: 'update failed')
end
it 'marks the mirror as to be retried' do
execute!
expect(remote_mirror).to be_to_retry
- expect(remote_mirror.last_error).to include('fetch failed')
+ expect(remote_mirror.last_error).to include('update failed')
end
it "marks the mirror as failed after #{described_class::MAX_TRIES} tries" do
service.execute(remote_mirror, described_class::MAX_TRIES)
expect(remote_mirror).to be_failed
- expect(remote_mirror.last_error).to include('fetch failed')
+ expect(remote_mirror.last_error).to include('update failed')
end
end
context 'when there are divergent refs' do
- before do
- stub_fetch_remote(project, remote_name: remote_name, ssh_auth: remote_mirror)
- end
-
it 'marks the mirror as failed and sets an error message' do
response = double(divergent_refs: %w[refs/heads/master refs/heads/develop])
expect(remote_mirror).to receive(:update_repository).and_return(response)
@@ -106,37 +94,5 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
expect(remote_mirror.last_error).to include("refs/heads/develop")
end
end
-
- # https://gitlab.com/gitlab-org/gitaly/-/issues/2670
- context 'when `gitaly_ruby_remote_branches_ls_remote` is enabled' do
- before do
- stub_feature_flags(gitaly_ruby_remote_branches_ls_remote: true)
- end
-
- it 'does not perform a fetch' do
- expect(project.repository).not_to receive(:fetch_remote)
-
- execute!
- end
- end
- end
-
- def stub_fetch_remote(project, remote_name:, ssh_auth:)
- allow(project.repository)
- .to receive(:fetch_remote)
- .with(remote_name, no_tags: true, ssh_auth: ssh_auth) { fetch_remote(project.repository, remote_name) }
- end
-
- def fetch_remote(repository, remote_name)
- local_branch_names(repository).each do |branch|
- commit = repository.commit(branch)
- repository.write_ref("refs/remotes/#{remote_name}/#{branch}", commit.id) if commit
- end
- end
-
- def local_branch_names(repository)
- branch_names = repository.branches.map(&:name)
- # we want the protected branch to be pushed first
- branch_names.unshift(branch_names.delete('master'))
end
end
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 57e02c26b71..0fcd14f3bc9 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
let(:repository_storage_move) { create(:project_repository_storage_move, :scheduled, project: project, destination_storage_name: destination) }
let!(:checksum) { project.repository.checksum }
let(:project_repository_double) { double(:repository) }
+ let(:original_project_repository_double) { double(:repository) }
before do
allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
@@ -29,6 +30,9 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
allow(Gitlab::Git::Repository).to receive(:new)
.with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
.and_return(project_repository_double)
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('default', project.repository.raw.relative_path, nil, nil)
+ .and_return(original_project_repository_double)
end
context 'when the move succeeds' do
@@ -41,8 +45,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
- expect(GitlabShellWorker).to receive(:perform_async).with(:mv_repository, 'default', anything, anything)
- .and_call_original
+ expect(original_project_repository_double).to receive(:remove)
result = subject.execute
project.reload
@@ -74,13 +77,29 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
expect(project_repository_double).to receive(:replicate)
.with(project.repository.raw)
.and_raise(Gitlab::Git::CommandError)
- expect(GitlabShellWorker).not_to receive(:perform_async)
result = subject.execute
expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
+ expect(repository_storage_move).to be_failed
+ end
+ end
+
+ context 'when the cleanup fails' do
+ it 'sets the correct state' do
+ expect(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ expect(project_repository_double).to receive(:checksum)
+ .and_return(checksum)
+ expect(original_project_repository_double).to receive(:remove)
+ .and_raise(Gitlab::Git::CommandError)
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(repository_storage_move).to be_cleanup_failed
end
end
@@ -93,7 +112,6 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return('not matching checksum')
- expect(GitlabShellWorker).not_to receive(:perform_async)
result = subject.execute
@@ -114,6 +132,7 @@ RSpec.describe Projects::UpdateRepositoryStorageService do
.with(project.repository.raw)
expect(project_repository_double).to receive(:checksum)
.and_return(checksum)
+ expect(original_project_repository_double).to receive(:remove)
result = subject.execute
project.reload
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index 6620ee6e697..4a613f42556 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -396,6 +396,50 @@ RSpec.describe Projects::UpdateService do
end
end
+ shared_examples 'updating pages configuration' do
+ it 'schedules the `PagesUpdateConfigurationWorker` when pages are deployed' do
+ project.mark_pages_as_deployed
+
+ expect(PagesUpdateConfigurationWorker).to receive(:perform_async).with(project.id)
+
+ subject
+ end
+
+ it "does not schedule a job when pages aren't deployed" do
+ project.mark_pages_as_not_deployed
+
+ expect(PagesUpdateConfigurationWorker).not_to receive(:perform_async).with(project.id)
+
+ subject
+ end
+
+ context 'when `async_update_pages_config` is disabled' do
+ before do
+ stub_feature_flags(async_update_pages_config: false)
+ end
+
+ it 'calls Projects::UpdatePagesConfigurationService when pages are deployed' do
+ project.mark_pages_as_deployed
+
+ expect(Projects::UpdatePagesConfigurationService)
+ .to receive(:new)
+ .with(project)
+ .and_call_original
+
+ subject
+ end
+
+ it "does not update pages config when pages aren't deployed" do
+ project.mark_pages_as_not_deployed
+
+ expect(Projects::UpdatePagesConfigurationService)
+ .not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+
context 'when updating #pages_https_only', :https_pages_enabled do
subject(:call_service) do
update_project(project, admin, pages_https_only: false)
@@ -407,14 +451,7 @@ RSpec.describe Projects::UpdateService do
.to(false)
end
- it 'calls Projects::UpdatePagesConfigurationService' do
- expect(Projects::UpdatePagesConfigurationService)
- .to receive(:new)
- .with(project)
- .and_call_original
-
- call_service
- end
+ it_behaves_like 'updating pages configuration'
end
context 'when updating #pages_access_level' do
@@ -428,14 +465,7 @@ RSpec.describe Projects::UpdateService do
.to(ProjectFeature::ENABLED)
end
- it 'calls Projects::UpdatePagesConfigurationService' do
- expect(Projects::UpdatePagesConfigurationService)
- .to receive(:new)
- .with(project)
- .and_call_original
-
- call_service
- end
+ it_behaves_like 'updating pages configuration'
end
context 'when updating #emails_disabled' do
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index 3c0698aa203..ad4696b0074 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -198,6 +198,7 @@ RSpec.describe Releases::CreateService do
released_at: released_at
}.compact
end
+
let(:last_release) { project.releases.last }
around do |example|
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index f22c379cd30..7dbd55a6909 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -34,6 +34,16 @@ RSpec.describe ResourceAccessTokens::CreateService do
end
end
+ shared_examples 'fails on gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?) { true }
+ end
+
+ it 'returns nil' do
+ expect(subject).to be nil
+ end
+ end
+
shared_examples 'allows creation of bot with valid params' do
it { expect { subject }.to change { User.count }.by(1) }
@@ -171,6 +181,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
it_behaves_like 'fails when user does not have the permission to create a Resource Bot'
it_behaves_like 'fails when flag is disabled'
+ it_behaves_like 'fails on gitlab.com'
context 'user with valid permission' do
before_all do
diff --git a/spec/services/resource_events/change_milestone_service_spec.rb b/spec/services/resource_events/change_milestone_service_spec.rb
index 9c0f9420f7a..3a9dadbd40e 100644
--- a/spec/services/resource_events/change_milestone_service_spec.rb
+++ b/spec/services/resource_events/change_milestone_service_spec.rb
@@ -3,9 +3,15 @@
require 'spec_helper'
RSpec.describe ResourceEvents::ChangeMilestoneService do
+ let_it_be(:timebox) { create(:milestone) }
+
+ let(:created_at_time) { Time.utc(2019, 12, 30) }
+ let(:add_timebox_args) { { created_at: created_at_time, old_milestone: nil } }
+ let(:remove_timebox_args) { { created_at: created_at_time, old_milestone: timebox } }
+
[:issue, :merge_request].each do |issuable|
- it_behaves_like 'a milestone events creator' do
- let(:resource) { create(issuable) }
+ it_behaves_like 'timebox(milestone or iteration) resource events creator', ResourceMilestoneEvent do
+ let_it_be(:resource) { create(issuable) }
end
end
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 52aef73ac77..f6bb7acee57 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -374,6 +374,19 @@ RSpec.describe SearchService do
subject(:result) { search_service.search_objects }
+ shared_examples "redaction limits N+1 queries" do |limit:|
+ it 'does not exceed the query limit' do
+ # issue the query once up front so the data-loading calls are not counted below
+ unredacted_results.to_a
+
+ # only the calls from the redaction are left
+ query = ActiveRecord::QueryRecorder.new { result }
+
+ # these are the project authorization calls, which are not preloaded
+ expect(query.count).to be <= limit
+ end
+ end
+
def found_blob(project)
Gitlab::Search::FoundBlob.new(project: project)
end
@@ -427,6 +440,12 @@ RSpec.describe SearchService do
it 'redacts the inaccessible merge request' do
expect(result).to contain_exactly(readable)
end
+
+ context 'with :with_api_entity_associations' do
+ let(:unredacted_results) { ar_relation(MergeRequest.with_api_entity_associations, readable, unreadable) }
+
+ it_behaves_like "redaction limits N+1 queries", limit: 7
+ end
end
context 'project repository blobs' do
@@ -460,6 +479,10 @@ RSpec.describe SearchService do
it 'redacts the inaccessible snippet' do
expect(result).to contain_exactly(readable)
end
+
+ context 'with :with_api_entity_associations' do
+ it_behaves_like "redaction limits N+1 queries", limit: 12
+ end
end
context 'personal snippets' do
@@ -471,6 +494,10 @@ RSpec.describe SearchService do
it 'redacts the inaccessible snippet' do
expect(result).to contain_exactly(readable)
end
+
+ context 'with :with_api_entity_associations' do
+ it_behaves_like "redaction limits N+1 queries", limit: 3
+ end
end
context 'commits' do
diff --git a/spec/services/service_desk_settings/update_service_spec.rb b/spec/services/service_desk_settings/update_service_spec.rb
index 8b920d536b4..fbef587365d 100644
--- a/spec/services/service_desk_settings/update_service_spec.rb
+++ b/spec/services/service_desk_settings/update_service_spec.rb
@@ -31,6 +31,17 @@ RSpec.describe ServiceDeskSettings::UpdateService do
end
end
+ context 'when project_key is an empty string' do
+ let(:params) { { project_key: '' } }
+
+ it 'sets nil project_key' do
+ result = described_class.new(settings.project, user, params).execute
+
+ expect(result[:status]).to eq :success
+ expect(settings.reload.project_key).to be_nil
+ end
+ end
+
context 'with invalid params' do
let(:params) { { outgoing_name: 'x' * 256 } }
diff --git a/spec/services/snippets/create_service_spec.rb b/spec/services/snippets/create_service_spec.rb
index 62eef00b67f..2106a9c2045 100644
--- a/spec/services/snippets/create_service_spec.rb
+++ b/spec/services/snippets/create_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Snippets::CreateService do
visibility_level: Gitlab::VisibilityLevel::PRIVATE
}
end
+
let(:extra_opts) { {} }
let(:creator) { admin }
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index 66dddcc49de..638fe1948fd 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Snippets::UpdateService do
visibility_level: visibility_level
}
end
+
let(:extra_opts) { {} }
let(:options) { base_opts.merge(extra_opts) }
let(:updater) { user }
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index abb8e49ec52..8edd9406bce 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Spam::SpamActionService do
'HTTP_USER_AGENT' => fake_user_agent,
'HTTP_REFERRER' => fake_referrer }
end
+
let(:request) { double(:request, env: env) }
let_it_be(:project) { create(:project, :public) }
diff --git a/spec/services/spam/spam_verdict_service_spec.rb b/spec/services/spam/spam_verdict_service_spec.rb
index d775e1bdfb5..14b788e3a86 100644
--- a/spec/services/spam/spam_verdict_service_spec.rb
+++ b/spec/services/spam/spam_verdict_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Spam::SpamVerdictService do
'HTTP_USER_AGENT' => fake_user_agent,
'HTTP_REFERRER' => fake_referrer }
end
+
let(:request) { double(:request, env: env) }
let(:check_for_spam) { true }
diff --git a/spec/services/submit_usage_ping_service_spec.rb b/spec/services/submit_usage_ping_service_spec.rb
index 4885ef99c13..450af68d383 100644
--- a/spec/services/submit_usage_ping_service_spec.rb
+++ b/spec/services/submit_usage_ping_service_spec.rb
@@ -49,17 +49,22 @@ RSpec.describe SubmitUsagePingService do
let(:with_conv_index_params) { { conv_index: score_params[:score] } }
let(:without_dev_ops_score_params) { { dev_ops_score: {} } }
- context 'when usage ping is disabled' do
- before do
- stub_application_setting(usage_ping_enabled: false)
- end
+ shared_examples 'does not run' do
+ it do
+ expect(Gitlab::HTTP).not_to receive(:post)
+ expect(Gitlab::UsageData).not_to receive(:data)
- it 'does not run' do
- expect(HTTParty).not_to receive(:post)
+ subject.execute
+ end
+ end
- result = subject.execute
+ shared_examples 'does not send a blank usage ping payload' do
+ it do
+ expect(Gitlab::HTTP).not_to receive(:post)
- expect(result).to eq false
+ expect { subject.execute }.to raise_error(described_class::SubmissionError) do |error|
+ expect(error.message).to include('Usage data is blank')
+ end
end
end
@@ -75,33 +80,47 @@ RSpec.describe SubmitUsagePingService do
end
end
+ context 'when usage ping is disabled' do
+ before do
+ stub_application_setting(usage_ping_enabled: false)
+ end
+
+ it_behaves_like 'does not run'
+ end
+
context 'when usage ping is enabled' do
before do
stub_usage_data_connections
stub_application_setting(usage_ping_enabled: true)
end
+ context 'and user requires usage stats consent' do
+ before do
+ allow(User).to receive(:single_user).and_return(double(:user, requires_usage_stats_consent?: true))
+ end
+
+ it_behaves_like 'does not run'
+ end
+
it 'sends a POST request' do
- response = stub_response(without_dev_ops_score_params)
+ response = stub_response(body: without_dev_ops_score_params)
subject.execute
expect(response).to have_been_requested
end
- it 'refreshes usage data statistics before submitting' do
- stub_response(without_dev_ops_score_params)
+ it 'forces a refresh of usage data statistics before submitting' do
+ stub_response(body: without_dev_ops_score_params)
- expect(Gitlab::UsageData).to receive(:to_json)
- .with(force_refresh: true)
- .and_call_original
+ expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_call_original
subject.execute
end
context 'when conv_index data is passed' do
before do
- stub_response(with_conv_index_params)
+ stub_response(body: with_conv_index_params)
end
it_behaves_like 'saves DevOps score data from the response'
@@ -109,18 +128,84 @@ RSpec.describe SubmitUsagePingService do
context 'when DevOps score data is passed' do
before do
- stub_response(with_dev_ops_score_params)
+ stub_response(body: with_dev_ops_score_params)
end
it_behaves_like 'saves DevOps score data from the response'
end
+
+ context 'with save_raw_usage_data feature enabled' do
+ before do
+ stub_response(body: with_dev_ops_score_params)
+ stub_feature_flags(save_raw_usage_data: true)
+ end
+
+ it 'creates a raw_usage_data record' do
+ expect { subject.execute }.to change(RawUsageData, :count).by(1)
+ end
+
+ it 'saves the correct payload' do
+ recorded_at = Time.current
+ usage_data = { uuid: 'uuid', recorded_at: recorded_at }
+
+ expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_return(usage_data)
+
+ subject.execute
+
+ raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+
+ expect(raw_usage_data.recorded_at).to be_like_time(recorded_at)
+ expect(raw_usage_data.payload.to_json).to eq(usage_data.to_json)
+ end
+ end
+
+ context 'with save_raw_usage_data feature disabled' do
+ before do
+ stub_response(body: with_dev_ops_score_params)
+ end
+
+ it 'does not create a raw_usage_data record' do
+ stub_feature_flags(save_raw_usage_data: false)
+
+ expect { subject.execute }.to change(RawUsageData, :count).by(0)
+ end
+ end
+
+ context 'and usage ping response has unsuccessful status' do
+ before do
+ stub_response(body: nil, status: 504)
+ end
+
+ it 'raises an exception' do
+ expect { subject.execute }.to raise_error(described_class::SubmissionError) do |error|
+ expect(error.message).to include('Unsuccessful response code: 504')
+ end
+ end
+ end
+
+ context 'and usage data is empty string' do
+ before do
+ allow(Gitlab::UsageData).to receive(:data).and_return({})
+ end
+
+ it_behaves_like 'does not send a blank usage ping payload'
+ end
+
+ context 'and usage data is nil' do
+ before do
+ allow(Gitlab::UsageData).to receive(:data).and_return(nil)
+ end
+
+ it_behaves_like 'does not send a blank usage ping payload'
+ end
end
- def stub_response(body)
- stub_full_request('https://version.gitlab.com/usage_data', method: :post)
+ def stub_response(body:, status: 201)
+ stub_full_request(SubmitUsagePingService::STAGING_URL, method: :post)
.to_return(
headers: { 'Content-Type' => 'application/json' },
- body: body.to_json
+ body: body.to_json,
+ status: status
)
end
end
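As a side note, the reworked `stub_response(body:, status:)` helper and the new shared examples compose roughly as follows inside this spec; this is a condensed, illustrative sketch using only names visible in the hunks above (`subject` is the service instance defined elsewhere in the file):

# Illustrative sketch only; mirrors the pattern added in the hunks above.
context 'when the reporting endpoint responds with an error status' do
  before do
    stub_response(body: nil, status: 504) # keyword arguments added in this change
  end

  it 'raises SubmissionError mentioning the response code' do
    expect { subject.execute }
      .to raise_error(described_class::SubmissionError, /Unsuccessful response code: 504/)
  end
end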
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index aa9caf35987..d8ade0fbbda 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -609,40 +609,16 @@ RSpec.describe Suggestions::ApplyService do
end
end
- context 'suggestion is eligible to be outdated' do
- it 'returns error message' do
- expect(suggestion).to receive(:outdated?) { true }
-
- result = apply_service.new(user, suggestion).execute
-
- expect(result).to eq(message: 'A suggestion is not applicable.',
- status: :error)
- end
- end
-
- context 'note is outdated' do
- before do
- allow(diff_note).to receive(:active?) { false }
- end
+ context 'suggestion is not appliable' do
+ let(:inapplicable_reason) { "Can't apply this suggestion." }
it 'returns error message' do
- result = apply_service.new(user, suggestion).execute
-
- expect(result).to eq(message: 'A suggestion is not applicable.',
- status: :error)
- end
- end
+ expect(suggestion).to receive(:appliable?).and_return(false)
+ expect(suggestion).to receive(:inapplicable_reason).and_return(inapplicable_reason)
- context 'suggestion was already applied' do
- before do
- suggestion.update!(applied: true, commit_id: 'sha')
- end
-
- it 'returns error message' do
result = apply_service.new(user, suggestion).execute
- expect(result).to eq(message: 'A suggestion is not applicable.',
- status: :error)
+ expect(result).to eq(message: inapplicable_reason, status: :error)
end
end
diff --git a/spec/services/suggestions/create_service_spec.rb b/spec/services/suggestions/create_service_spec.rb
index 54e7c5cc127..80823364fe8 100644
--- a/spec/services/suggestions/create_service_spec.rb
+++ b/spec/services/suggestions/create_service_spec.rb
@@ -159,6 +159,7 @@ RSpec.describe Suggestions::CreateService do
```
MARKDOWN
end
+
let(:position) { build_position(new_line: 13) }
it 'creates an appliable suggestion' do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 58fa772fefb..969e5955609 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -699,7 +699,7 @@ RSpec.describe SystemNoteService do
it 'calls AlertManagementService' do
expect_next_instance_of(SystemNotes::AlertManagementService) do |service|
- expect(service).to receive(:new_alert_issue).with(alert, alert.issue)
+ expect(service).to receive(:new_alert_issue).with(alert.issue)
end
described_class.new_alert_issue(alert, alert.issue, author)
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index 403763d5fd9..943d7f55af4 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe ::SystemNotes::AlertManagementService do
describe '#new_alert_issue' do
let_it_be(:issue) { noteable.issue }
- subject { described_class.new(noteable: noteable, project: project, author: author).new_alert_issue(noteable, issue) }
+ subject { described_class.new(noteable: noteable, project: project, author: author).new_alert_issue(issue) }
it_behaves_like 'a system note' do
let(:action) { 'alert_issue_added' }
@@ -32,4 +32,18 @@ RSpec.describe ::SystemNotes::AlertManagementService do
expect(subject.note).to eq("created issue #{issue.to_reference(project)} for this alert")
end
end
+
+ describe '#closed_alert_issue' do
+ let_it_be(:issue) { noteable.issue }
+
+ subject { described_class.new(noteable: noteable, project: project, author: author).closed_alert_issue(issue) }
+
+ it_behaves_like 'a system note' do
+ let(:action) { 'status' }
+ end
+
+ it 'has the appropriate message' do
+ expect(subject.note).to eq("changed the status to **Resolved** by closing issue #{issue.to_reference(project)}")
+ end
+ end
end
diff --git a/spec/services/system_notes/design_management_service_spec.rb b/spec/services/system_notes/design_management_service_spec.rb
index 6267ad2aaad..19e1f338eb8 100644
--- a/spec/services/system_notes/design_management_service_spec.rb
+++ b/spec/services/system_notes/design_management_service_spec.rb
@@ -134,6 +134,7 @@ RSpec.describe SystemNotes::DesignManagementService do
let(:discussion_note) do
create(:diff_note_on_design, noteable: design, author: author)
end
+
let(:action) { 'designs_discussion_added' }
it_behaves_like 'a system note' do
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index b187025eb11..94d4b61933d 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -3,22 +3,23 @@
require 'spec_helper'
RSpec.describe TodoService do
- let(:author) { create(:user) }
- let(:assignee) { create(:user) }
- let(:non_member) { create(:user) }
- let(:member) { create(:user) }
- let(:guest) { create(:user) }
- let(:admin) { create(:admin) }
- let(:john_doe) { create(:user) }
- let(:skipped) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:john_doe) { create(:user) }
+ let_it_be(:skipped) { create(:user) }
+
let(:skip_users) { [skipped] }
- let(:project) { create(:project, :repository) }
let(:mentions) { 'FYI: ' + [author, assignee, john_doe, member, guest, non_member, admin, skipped].map(&:to_reference).join(' ') }
let(:directly_addressed) { [author, assignee, john_doe, member, guest, non_member, admin, skipped].map(&:to_reference).join(' ') }
let(:directly_addressed_and_mentioned) { member.to_reference + ", what do you think? cc: " + [guest, admin, skipped].map(&:to_reference).join(' ') }
let(:service) { described_class.new }
- before do
+ before_all do
project.add_guest(guest)
project.add_developer(author)
project.add_developer(assignee)
@@ -58,6 +59,10 @@ RSpec.describe TodoService do
should_not_create_todo(user: guest, target: addressed_target_assigned, action: Todo::DIRECTLY_ADDRESSED)
end
+
+ it 'does not create a todo if already assigned' do
+ should_not_create_any_todo { service.send(described_method, target_assigned, author, [john_doe]) }
+ end
end
describe 'Issues' do
@@ -456,7 +461,16 @@ RSpec.describe TodoService do
end
context 'leaving a note on a commit in a public project with private code' do
- let(:project) { create(:project, :repository, :public, :repository_private) }
+ let_it_be(:project) { create(:project, :repository, :public, :repository_private) }
+
+ before_all do
+ project.add_guest(guest)
+ project.add_developer(author)
+ project.add_developer(assignee)
+ project.add_developer(member)
+ project.add_developer(john_doe)
+ project.add_developer(skipped)
+ end
it 'creates a todo for each valid mentioned user' do
expected_todo = base_commit_todo_attrs.merge(
@@ -492,7 +506,16 @@ RSpec.describe TodoService do
end
context 'leaving a note on a commit in a private project' do
- let(:project) { create(:project, :repository, :private) }
+ let_it_be(:project) { create(:project, :repository, :private) }
+
+ before_all do
+ project.add_guest(guest)
+ project.add_developer(author)
+ project.add_developer(assignee)
+ project.add_developer(member)
+ project.add_developer(john_doe)
+ project.add_developer(skipped)
+ end
it 'creates a todo for each valid mentioned user' do
expected_todo = base_commit_todo_attrs.merge(
@@ -554,10 +577,10 @@ RSpec.describe TodoService do
end
end
- describe '#reassigned_issuable' do
- let(:described_method) { :reassigned_issuable }
+ describe '#reassigned_assignable' do
+ let(:described_method) { :reassigned_assignable }
- context 'issuable is a merge request' do
+ context 'assignable is a merge request' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:merge_request, source_project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:merge_request, source_project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
@@ -565,13 +588,21 @@ RSpec.describe TodoService do
end
end
- context 'issuable is an issue' do
+ context 'assignable is an issue' do
it_behaves_like 'reassigned target' do
let(:target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "- [ ] Task 1\n- [ ] Task 2 #{mentions}") }
let(:addressed_target_assigned) { create(:issue, project: project, author: author, assignees: [john_doe], description: "#{directly_addressed}\n- [ ] Task 1\n- [ ] Task 2") }
let(:target_unassigned) { create(:issue, project: project, author: author, assignees: []) }
end
end
+
+ context 'assignable is an alert' do
+ it_behaves_like 'reassigned target' do
+ let(:target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
+ let(:addressed_target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
+ let(:target_unassigned) { create(:alert_management_alert, project: project, assignees: []) }
+ end
+ end
end
describe 'Merge Requests' do
@@ -759,16 +790,6 @@ RSpec.describe TodoService do
end
end
- describe '#assign_alert' do
- let(:described_method) { :assign_alert }
-
- it_behaves_like 'reassigned target' do
- let(:target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
- let(:addressed_target_assigned) { create(:alert_management_alert, project: project, assignees: [john_doe]) }
- let(:target_unassigned) { create(:alert_management_alert, project: project, assignees: []) }
- end
- end
-
describe '#merge_request_build_failed' do
let(:merge_participants) { [mr_unassigned.author, admin] }
@@ -822,7 +843,17 @@ RSpec.describe TodoService do
end
describe '#new_note' do
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ before_all do
+ project.add_guest(guest)
+ project.add_developer(author)
+ project.add_developer(assignee)
+ project.add_developer(member)
+ project.add_developer(john_doe)
+ project.add_developer(skipped)
+ end
+
let(:mention) { john_doe.to_reference }
let(:diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: mr_unassigned, author: author, note: "Hey #{mention}") }
let(:addressed_diff_note_on_merge_request) { create(:diff_note_on_merge_request, project: project, noteable: mr_unassigned, author: author, note: "#{mention}, hey!") }
diff --git a/spec/services/users/create_service_spec.rb b/spec/services/users/create_service_spec.rb
index 69d2d6ca9ff..9040966215c 100644
--- a/spec/services/users/create_service_spec.rb
+++ b/spec/services/users/create_service_spec.rb
@@ -154,6 +154,7 @@ RSpec.describe Users::CreateService do
let(:params) do
{ name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', skip_confirmation: true }
end
+
let(:service) { described_class.new(nil, params) }
it 'persists the given attributes' do
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index e45cb05a6c5..9404668e3c5 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -76,6 +76,26 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
service.execute_without_lease
end
+ it 'removes duplicate entries' do
+ [Gitlab::Access::MAINTAINER, Gitlab::Access::REPORTER].each do |access_level|
+ user.project_authorizations.create!(project: project, access_level: access_level)
+ end
+
+ expect(service).to(
+ receive(:update_authorizations)
+ .with([project.id], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
+ .and_call_original)
+
+ service.execute_without_lease
+
+ expect(user.project_authorizations.count).to eq(1)
+ project_authorization = ProjectAuthorization.where(
+ project_id: project.id,
+ user_id: user.id,
+ access_level: Gitlab::Access::MAINTAINER)
+ expect(project_authorization).to exist
+ end
+
it 'sets the access level of a project to the highest available level' do
user.project_authorizations.delete_all
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 2be481c5b62..b7b81d33c3e 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -13,9 +13,11 @@ RSpec.describe WebHookService do
'X-Gitlab-Event' => 'Push Hook'
}
end
+
let(:data) do
{ before: 'oldrev', after: 'newrev', ref: 'ref' }
end
+
let(:service_instance) { described_class.new(project_hook, data, :push_hooks) }
describe '#initialize' do
@@ -128,6 +130,14 @@ RSpec.describe WebHookService do
end
end
+ context 'when request body size is too big' do
+ it 'does not perform the request' do
+ stub_const("#{described_class}::REQUEST_BODY_SIZE_LIMIT", 10.bytes)
+
+ expect(service_instance.execute).to eq({ status: :error, message: "Gitlab::Json::LimitedEncoder::LimitExceeded" })
+ end
+ end
+
it 'handles 200 status code' do
stub_full_request(project_hook.url, method: :post).to_return(status: 200, body: 'Success')
diff --git a/spec/services/wiki_pages/event_create_service_spec.rb b/spec/services/wiki_pages/event_create_service_spec.rb
index abf3bcb4c4d..974f2591763 100644
--- a/spec/services/wiki_pages/event_create_service_spec.rb
+++ b/spec/services/wiki_pages/event_create_service_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe WikiPages::EventCreateService do
let_it_be(:page) { create(:wiki_page, project: project) }
let(:slug) { generate(:sluggified_title) }
let(:action) { :created }
- let(:response) { subject.execute(slug, page, action) }
+ let(:fingerprint) { page.sha }
+ let(:response) { subject.execute(slug, page, action, fingerprint) }
context 'the user is nil' do
subject { described_class.new(nil) }
diff --git a/spec/services/wikis/create_attachment_service_spec.rb b/spec/services/wikis/create_attachment_service_spec.rb
index 50cb9ac111c..22e34e1f373 100644
--- a/spec/services/wikis/create_attachment_service_spec.rb
+++ b/spec/services/wikis/create_attachment_service_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Wikis::CreateAttachmentService do
file_content: 'Content of attachment'
}
end
+
let(:opts) { file_opts }
subject(:service) { described_class.new(container: container, current_user: user, params: opts) }
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index ed3211a9c87..68beef40c0b 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -107,7 +107,6 @@ RSpec.configure do |config|
config.include FixtureHelpers
config.include NonExistingRecordsHelpers
config.include GitlabRoutingHelper
- config.include StubFeatureFlags
config.include StubExperiments
config.include StubGitlabCalls
config.include StubGitlabData
@@ -140,6 +139,8 @@ RSpec.configure do |config|
config.include SidekiqMiddleware
config.include StubActionCableConnection, type: :channel
+ include StubFeatureFlags
+
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
config.default_retry_count = ENV.fetch('RETRIES', 3).to_i + 1
@@ -158,6 +159,10 @@ RSpec.configure do |config|
# Reload all feature flags definitions
Feature.register_definitions
+
+ # Enable all feature flags by default for testing.
+ # Any changes are reset in the after hook.
+ stub_all_feature_flags
end
config.after(:all) do
@@ -176,9 +181,6 @@ RSpec.configure do |config|
config.before do |example|
if example.metadata.fetch(:stub_feature_flags, true)
- # Enable all features by default for testing
- stub_all_feature_flags
-
# The following can be removed when we remove the staged rollout strategy
# and we can just enable it using instance wide settings
# (ie. ApplicationSetting#auto_devops_enabled)
@@ -203,6 +205,8 @@ RSpec.configure do |config|
stub_feature_flags(file_identifier_hash: false)
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
+ else
+ unstub_all_feature_flags
end
# Enable Marginalia feature for all specs in the test suite.
@@ -314,6 +318,9 @@ RSpec.configure do |config|
config.after do
Fog.unmock! if Fog.mock?
Gitlab::CurrentSettings.clear_in_memory_application_settings!
+
+ # Reset all feature flag stubs to default for testing
+ stub_all_feature_flags
end
config.before(:example, :mailer) do
diff --git a/spec/support/counter_attribute.rb b/spec/support/counter_attribute.rb
new file mode 100644
index 00000000000..ea71b25b4c0
--- /dev/null
+++ b/spec/support/counter_attribute.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.before(:each, :counter_attribute) do
+ stub_const('CounterAttributeModel', Class.new(ProjectStatistics))
+
+ CounterAttributeModel.class_eval do
+ include CounterAttribute
+
+ counter_attribute :build_artifacts_size
+ counter_attribute :commit_count
+ end
+ end
+end
diff --git a/spec/support/csv_response.rb b/spec/support/csv_response.rb
new file mode 100644
index 00000000000..9ed76dcdd4a
--- /dev/null
+++ b/spec/support/csv_response.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.include_context 'CSV response', type: :controller
+end
diff --git a/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml b/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml
new file mode 100644
index 00000000000..4134660e4b9
--- /dev/null
+++ b/spec/support/gitlab_stubs/gitlab_ci_for_sast.yml
@@ -0,0 +1,13 @@
+include:
+ - template: SAST.gitlab-ci.yml
+
+variables:
+ SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/gitlab-org/security-products/analyzers2"
+ SAST_EXCLUDED_PATHS: "spec, executables"
+
+stages:
+ - our_custom_security_stage
+sast:
+ stage: our_custom_security_stage
+ variables:
+ SEARCH_MAX_DEPTH: 8
diff --git a/spec/support/helpers/bare_repo_operations.rb b/spec/support/helpers/bare_repo_operations.rb
index 099610f087d..98fa13db6c2 100644
--- a/spec/support/helpers/bare_repo_operations.rb
+++ b/spec/support/helpers/bare_repo_operations.rb
@@ -44,7 +44,7 @@ class BareRepoOperations
yield stdin if block_given?
end
- unless status.zero?
+ unless status == 0
if allow_failure
return []
else
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index d101b092e7d..f4343b8b783 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -37,7 +37,7 @@ module CycleAnalyticsHelpers
end
def create_cycle(user, project, issue, mr, milestone, pipeline)
- issue.update(milestone: milestone)
+ issue.update!(milestone: milestone)
pipeline.run
ci_build = create(:ci_build, pipeline: pipeline, status: :success, author: user)
diff --git a/spec/support/helpers/design_management_test_helpers.rb b/spec/support/helpers/design_management_test_helpers.rb
index 1daa92e8ad4..db217250b17 100644
--- a/spec/support/helpers/design_management_test_helpers.rb
+++ b/spec/support/helpers/design_management_test_helpers.rb
@@ -35,9 +35,9 @@ module DesignManagementTestHelpers
def act_on_designs(designs, &block)
issue = designs.first.issue
- version = build(:design_version, :empty, issue: issue).tap { |v| v.save(validate: false) }
+ version = build(:design_version, :empty, issue: issue).tap { |v| v.save!(validate: false) }
designs.each do |d|
- yield.create(design: d, version: version)
+ yield.create!(design: d, version: version)
end
version
end
diff --git a/spec/support/helpers/filtered_search_helpers.rb b/spec/support/helpers/filtered_search_helpers.rb
index 1847a8f8a06..d203ff60cc9 100644
--- a/spec/support/helpers/filtered_search_helpers.rb
+++ b/spec/support/helpers/filtered_search_helpers.rb
@@ -13,7 +13,7 @@ module FilteredSearchHelpers
search = "#{search_term} "
end
- filtered_search.set(search)
+ filtered_search.set(search, rapid: false)
if submit
# Wait for the lazy author/assignee tokens that
diff --git a/spec/support/helpers/http_basic_auth_helpers.rb b/spec/support/helpers/http_basic_auth_helpers.rb
index c0b24b3dfa4..bc34e073f9f 100644
--- a/spec/support/helpers/http_basic_auth_helpers.rb
+++ b/spec/support/helpers/http_basic_auth_helpers.rb
@@ -8,19 +8,22 @@ module HttpBasicAuthHelpers
end
def job_basic_auth_header(job)
- basic_auth_header(Ci::Build::CI_REGISTRY_USER, job.token)
+ basic_auth_header(::Gitlab::Auth::CI_JOB_USER, job.token)
end
def client_basic_auth_header(client)
basic_auth_header(client.uid, client.secret)
end
+ def build_auth_headers(value)
+ { 'HTTP_AUTHORIZATION' => value }
+ end
+
+ def build_token_auth_header(token)
+ build_auth_headers("Bearer #{token}")
+ end
+
def basic_auth_header(username, password)
- {
- 'HTTP_AUTHORIZATION' => ActionController::HttpAuthentication::Basic.encode_credentials(
- username,
- password
- )
- }
+ build_auth_headers(ActionController::HttpAuthentication::Basic.encode_credentials(username, password))
end
end
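For context, the auth helpers that now live here are typically consumed from request specs along these lines; the endpoint, `personal_access_token`, and `job` variables below are illustrative assumptions, not taken from the diff:

# Illustrative only: exercising an API endpoint with the shared auth helpers.
it 'authenticates with a bearer token' do
  get api('/projects'), headers: build_token_auth_header(personal_access_token.token)

  expect(response).to have_gitlab_http_status(:ok)
end

it 'authenticates a CI job with basic auth' do
  get api('/projects'), headers: job_basic_auth_header(job)

  expect(response).to have_gitlab_http_status(:ok)
end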
diff --git a/spec/support/helpers/jira_service_helper.rb b/spec/support/helpers/jira_service_helper.rb
index 9072c41fe66..4895bc3ba15 100644
--- a/spec/support/helpers/jira_service_helper.rb
+++ b/spec/support/helpers/jira_service_helper.rb
@@ -10,7 +10,7 @@ module JiraServiceHelper
password = 'my-secret-password'
jira_issue_transition_id = '1'
- jira_tracker.update(
+ jira_tracker.update!(
url: url, username: username, password: password,
jira_issue_transition_id: jira_issue_transition_id, active: true
)
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 92f6d673255..1118cfcf7ac 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -40,7 +40,7 @@ module LoginHelpers
if user_or_role.is_a?(User)
user_or_role
else
- create(user_or_role)
+ create(user_or_role) # rubocop:disable Rails/SaveBang
end
gitlab_sign_in_with(user, **kwargs)
diff --git a/spec/support/helpers/memory_usage_helper.rb b/spec/support/helpers/memory_usage_helper.rb
index 984ea8cc571..aa7b3bae83a 100644
--- a/spec/support/helpers/memory_usage_helper.rb
+++ b/spec/support/helpers/memory_usage_helper.rb
@@ -21,7 +21,7 @@ module MemoryUsageHelper
def get_memory_usage
output, status = Gitlab::Popen.popen(%w(free -m))
- abort "`free -m` return code is #{status}: #{output}" unless status.zero?
+ abort "`free -m` return code is #{status}: #{output}" unless status == 0
result = output.split("\n")[1].split(" ")[1..-1]
attrs = %i(m_total m_used m_free m_shared m_buffers_cache m_available).freeze
diff --git a/spec/support/helpers/metrics_dashboard_helpers.rb b/spec/support/helpers/metrics_dashboard_helpers.rb
index b2dd8ead7dd..7168079fead 100644
--- a/spec/support/helpers/metrics_dashboard_helpers.rb
+++ b/spec/support/helpers/metrics_dashboard_helpers.rb
@@ -1,16 +1,22 @@
# frozen_string_literal: true
module MetricsDashboardHelpers
- def project_with_dashboard(dashboard_path, dashboard_yml = nil)
- dashboard_yml ||= fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml')
-
- create(:project, :custom_repo, files: { dashboard_path => dashboard_yml })
+ # @param dashboards [Hash<string, string>] - Should contain a hash where
+ # each key is the path to a dashboard in the repository and each value is
+ # the dashboard content.
+ # Ex: { '.gitlab/dashboards/dashboard1.yml' => fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
+ def project_with_dashboards(dashboards, project_params = {})
+ create(:project, :custom_repo, **project_params, files: dashboards)
end
- def project_with_dashboard_namespace(dashboard_path, dashboard_yml = nil)
+ def project_with_dashboard(dashboard_path, dashboard_yml = nil, project_params = {})
dashboard_yml ||= fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml')
- create(:project, :custom_repo, namespace: namespace, path: 'monitor-project', files: { dashboard_path => dashboard_yml })
+ project_with_dashboards({ dashboard_path => dashboard_yml }, project_params)
+ end
+
+ def project_with_dashboard_namespace(dashboard_path, dashboard_yml = nil, project_params = {})
+ project_with_dashboard(dashboard_path, dashboard_yml, project_params.reverse_merge(path: 'monitor-project'))
end
def delete_project_dashboard(project, user, dashboard_path)
diff --git a/spec/support/helpers/metrics_dashboard_url_helpers.rb b/spec/support/helpers/metrics_dashboard_url_helpers.rb
index cb9f58753a3..58b3d1e4d1d 100644
--- a/spec/support/helpers/metrics_dashboard_url_helpers.rb
+++ b/spec/support/helpers/metrics_dashboard_url_helpers.rb
@@ -13,4 +13,14 @@ module MetricsDashboardUrlHelpers
Gitlab::Metrics::Dashboard::Url.clear_memoization(method_name)
end
end
+
+ def stub_gitlab_domain
+ allow_any_instance_of(Banzai::Filter::InlineEmbedsFilter)
+ .to receive(:gitlab_domain)
+ .and_return(urls.root_url.chomp('/'))
+
+ allow(Gitlab::Metrics::Dashboard::Url)
+ .to receive(:gitlab_domain)
+ .and_return(urls.root_url.chomp('/'))
+ end
end
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index cfb1b185560..c7aa2ffe536 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -18,4 +18,22 @@ module NavbarStructureHelper
index = hash[:nav_sub_items].find_index(before_sub_nav_item_name)
hash[:nav_sub_items].insert(index + 1, new_sub_nav_item_name)
end
+
+ def insert_package_nav(within)
+ insert_after_nav_item(
+ within,
+ new_nav_item: {
+ nav_item: _('Packages & Registries'),
+ nav_sub_items: [_('Package Registry')]
+ }
+ )
+ end
+
+ def insert_container_nav(within)
+ insert_after_sub_nav_item(
+ _('Package Registry'),
+ within: _('Packages & Registries'),
+ new_sub_nav_item_name: _('Container Registry')
+ )
+ end
end
diff --git a/spec/support/helpers/notification_helpers.rb b/spec/support/helpers/notification_helpers.rb
index 887d68de4e1..aee57b452fe 100644
--- a/spec/support/helpers/notification_helpers.rb
+++ b/spec/support/helpers/notification_helpers.rb
@@ -12,7 +12,7 @@ module NotificationHelpers
def create_global_setting_for(user, level)
setting = user.global_notification_setting
setting.level = level
- setting.save
+ setting.save!
user
end
@@ -27,7 +27,7 @@ module NotificationHelpers
def create_notification_setting(user, resource, level)
setting = user.notification_settings_for(resource)
setting.level = level
- setting.save
+ setting.save!
end
# Create custom notifications
diff --git a/spec/support/helpers/packages_manager_api_spec_helper.rb b/spec/support/helpers/packages_manager_api_spec_helper.rb
index e5a690e1680..34e92c0595c 100644
--- a/spec/support/helpers/packages_manager_api_spec_helper.rb
+++ b/spec/support/helpers/packages_manager_api_spec_helper.rb
@@ -1,18 +1,6 @@
# frozen_string_literal: true
module PackagesManagerApiSpecHelpers
- def build_auth_headers(value)
- { 'HTTP_AUTHORIZATION' => value }
- end
-
- def build_basic_auth_header(username, password)
- build_auth_headers(ActionController::HttpAuthentication::Basic.encode_credentials(username, password))
- end
-
- def build_token_auth_header(token)
- build_auth_headers("Bearer #{token}")
- end
-
def build_jwt(personal_access_token, secret: jwt_secret, user_id: nil)
JSONWebToken::HMACToken.new(secret).tap do |jwt|
jwt['access_token'] = personal_access_token.id
diff --git a/spec/support/helpers/require_migration.rb b/spec/support/helpers/require_migration.rb
new file mode 100644
index 00000000000..d3f192a4142
--- /dev/null
+++ b/spec/support/helpers/require_migration.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'find'
+
+class RequireMigration
+ MIGRATION_FOLDERS = %w(db/migrate db/post_migrate ee/db/geo/migrate ee/db/geo/post_migrate).freeze
+ SPEC_FILE_PATTERN = /.+\/(?<file_name>.+)_spec\.rb/.freeze
+
+ class << self
+ def require_migration!(file_name)
+ file_paths = search_migration_file(file_name)
+
+ require file_paths.first
+ end
+
+ def search_migration_file(file_name)
+ MIGRATION_FOLDERS.flat_map do |path|
+ migration_path = Rails.root.join(path).to_s
+
+ Find.find(migration_path).grep(/\d+_#{file_name}\.rb/)
+ end
+ end
+ end
+end
+
+def require_migration!(file_name = nil)
+ location_info = caller_locations.first.path.match(RequireMigration::SPEC_FILE_PATTERN)
+ file_name ||= location_info[:file_name]
+
+ RequireMigration.require_migration!(file_name)
+end
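A brief sketch of how this new helper is intended to be used from a migration spec; the spec file and migration names are hypothetical:

# spec/migrations/add_some_column_spec.rb (hypothetical path)
require 'spec_helper'
require_migration!                  # infers 'add_some_column' from the spec file name
# or explicitly: require_migration!('add_some_column')

RSpec.describe AddSomeColumn do
  # migration examples go here
end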
diff --git a/spec/support/helpers/stub_configuration.rb b/spec/support/helpers/stub_configuration.rb
index e19f230d8df..3b733a2e57a 100644
--- a/spec/support/helpers/stub_configuration.rb
+++ b/spec/support/helpers/stub_configuration.rb
@@ -33,8 +33,8 @@ module StubConfiguration
allow(Gitlab.config).to receive_messages(to_settings(messages))
end
- def stub_default_url_options(host: "localhost", protocol: "http")
- url_options = { host: host, protocol: protocol }
+ def stub_default_url_options(host: "localhost", protocol: "http", script_name: nil)
+ url_options = { host: host, protocol: protocol, script_name: script_name }
allow(Rails.application.routes).to receive(:default_url_options).and_return(url_options)
end
diff --git a/spec/support/helpers/stub_feature_flags.rb b/spec/support/helpers/stub_feature_flags.rb
index 696148cacaf..792a1c21c31 100644
--- a/spec/support/helpers/stub_feature_flags.rb
+++ b/spec/support/helpers/stub_feature_flags.rb
@@ -1,6 +1,11 @@
# frozen_string_literal: true
module StubFeatureFlags
+ def self.included(base)
+ # Extend Feature class with methods that can stub feature flags.
+ Feature.prepend(StubbedFeature)
+ end
+
class StubFeatureGate
attr_reader :flipper_id
@@ -9,28 +14,14 @@ module StubFeatureFlags
end
end
+ # Ensure feature flags are stubbed and reset.
def stub_all_feature_flags
- adapter = Flipper::Adapters::Memory.new
- flipper = Flipper.new(adapter)
-
- allow(Feature).to receive(:flipper).and_return(flipper)
-
- # All new requested flags are enabled by default
- allow(Feature).to receive(:enabled?).and_wrap_original do |m, *args|
- feature_flag = m.call(*args)
-
- # If feature flag is not persisted we mark the feature flag as enabled
- # We do `m.call` as we want to validate the execution of method arguments
- # and a feature flag state if it is not persisted
- unless Feature.persisted_name?(args.first)
- # TODO: this is hack to support `promo_feature_available?`
- # We enable all feature flags by default unless they are `promo_`
- # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/218667
- feature_flag = true unless args.first.to_s.start_with?('promo_')
- end
+ Feature.stub = true
+ Feature.reset_flipper
+ end
- feature_flag
- end
+ def unstub_all_feature_flags
+ Feature.stub = false
end
# Stub Feature flags with `flag_name: true/false`
diff --git a/spec/support/helpers/stub_object_storage.rb b/spec/support/helpers/stub_object_storage.rb
index 6056359d026..8a52a614821 100644
--- a/spec/support/helpers/stub_object_storage.rb
+++ b/spec/support/helpers/stub_object_storage.rb
@@ -1,13 +1,6 @@
# frozen_string_literal: true
module StubObjectStorage
- def stub_packages_object_storage(**params)
- stub_object_storage_uploader(config: ::Gitlab.config.packages.object_store,
- uploader: ::Packages::PackageFileUploader,
- remote_directory: 'packages',
- **params)
- end
-
def stub_dependency_proxy_object_storage(**params)
stub_object_storage_uploader(config: ::Gitlab.config.dependency_proxy.object_store,
uploader: ::DependencyProxy::FileUploader,
@@ -44,7 +37,7 @@ module StubObjectStorage
Fog.mock!
::Fog::Storage.new(connection_params).tap do |connection|
- connection.directories.create(key: remote_directory)
+ connection.directories.create(key: remote_directory) # rubocop:disable Rails/SaveBang
# Cleanup remaining files
connection.directories.each do |directory|
@@ -54,9 +47,9 @@ module StubObjectStorage
end
end
- def stub_artifacts_object_storage(**params)
+ def stub_artifacts_object_storage(uploader = JobArtifactUploader, **params)
stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
- uploader: JobArtifactUploader,
+ uploader: uploader,
remote_directory: 'artifacts',
**params)
end
diff --git a/spec/support/helpers/stubbed_feature.rb b/spec/support/helpers/stubbed_feature.rb
new file mode 100644
index 00000000000..e78efcf6b75
--- /dev/null
+++ b/spec/support/helpers/stubbed_feature.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+# Extend the Feature class with the ability to stub feature flags.
+module StubbedFeature
+ extend ActiveSupport::Concern
+
+ class_methods do
+ # Turn stubbed feature flags on or off.
+ def stub=(stub)
+ @stub = stub
+ end
+
+ def stub?
+ @stub.nil? ? true : @stub
+ end
+
+ # Wipe any previously set feature flags.
+ def reset_flipper
+ @flipper = nil
+ end
+
+ # Return the stubbed Flipper instance when stubbing is enabled, otherwise defer to the real #flipper.
+ def flipper
+ if stub?
+ @flipper ||= Flipper.new(Flipper::Adapters::Memory.new)
+ else
+ super
+ end
+ end
+
+ # Wrap #enabled? so unpersisted flags report as enabled while stubbing is on.
+ def enabled?(*args)
+ feature_flag = super(*args)
+ return feature_flag unless stub?
+
+ # If the feature flag is not persisted we treat it as enabled by default.
+ # We still call `super` first so that argument validation runs and any
+ # persisted flag state is respected.
+ unless Feature.persisted_name?(args.first)
+ # TODO: this is a hack to support `promo_feature_available?`
+ # We enable all feature flags by default unless they are `promo_`
+ # Issue: https://gitlab.com/gitlab-org/gitlab/-/issues/218667
+ feature_flag = true unless args.first.to_s.start_with?('promo_')
+ end
+
+ feature_flag
+ end
+ end
+end
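Taken together with the spec_helper and StubFeatureFlags changes above, the intended flow inside an example looks roughly like this; `my_new_flag` is a made-up flag name and only methods shown in the hunks are used:

# Sketch of the stubbing lifecycle inside a spec example.
stub_all_feature_flags              # Feature.stub = true; Feature.reset_flipper
Feature.enabled?(:my_new_flag)      # => true, unpersisted flags default to enabled

stub_feature_flags(my_new_flag: false)
Feature.enabled?(:my_new_flag)      # => false after stubbing

unstub_all_feature_flags            # Feature.stub = false, back to the real Flipper instance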
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index f787aedf7aa..7dae960410d 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -6,8 +6,6 @@ module TestEnv
ComponentFailedToInstallError = Class.new(StandardError)
- SHA_REGEX = /\A[0-9a-f]{5,40}\z/i.freeze
-
# When developing the seed repository, comment out the branch you will modify.
BRANCH_SHA = {
'signed-commits' => '6101e87',
@@ -78,7 +76,7 @@ module TestEnv
'png-lfs' => 'fe42f41',
'sha-starting-with-large-number' => '8426165',
'invalid-utf8-diff-paths' => '99e4853',
- 'compare-with-merge-head-source' => 'b5f4399',
+ 'compare-with-merge-head-source' => 'f20a03d',
'compare-with-merge-head-target' => '2f1e176'
}.freeze
@@ -524,7 +522,7 @@ module TestEnv
def component_matches_git_sha?(component_folder, expected_version)
# Not a git SHA, so return early
- return false unless expected_version =~ SHA_REGEX
+ return false unless expected_version =~ ::Gitlab::Git::COMMIT_ID
sha, exit_status = Gitlab::Popen.popen(%W(#{Gitlab.config.git.bin_path} rev-parse HEAD), component_folder)
return false if exit_status != 0
diff --git a/spec/support/helpers/trigger_helpers.rb b/spec/support/helpers/trigger_helpers.rb
index 67c62cf4869..dd6d8ff5bb5 100644
--- a/spec/support/helpers/trigger_helpers.rb
+++ b/spec/support/helpers/trigger_helpers.rb
@@ -28,8 +28,6 @@ module TriggerHelpers
expect(timing).to eq(expected_timing.to_s)
expect(events).to match_array(Array.wrap(expected_events))
- # TODO: Update CREATE TRIGGER syntax to use EXECUTE FUNCTION
- # https://gitlab.com/gitlab-org/gitlab/-/issues/227089
expect(definition).to match(%r{execute (?:procedure|function) #{fn_name}()})
end
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index a4f40a4af0a..fab775dd404 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -119,6 +119,7 @@ module UsageDataHelpers
projects_with_terraform_states
pages_domains
protected_branches
+ protected_branches_except_default
releases
remote_mirrors
snippets
diff --git a/spec/support/helpers/wait_for_requests.rb b/spec/support/helpers/wait_for_requests.rb
index 52d1c59ab03..2cfd47634ca 100644
--- a/spec/support/helpers/wait_for_requests.rb
+++ b/spec/support/helpers/wait_for_requests.rb
@@ -42,7 +42,7 @@ module WaitForRequests
private
def finished_all_rack_requests?
- Gitlab::Testing::RequestBlockerMiddleware.num_active_requests.zero?
+ Gitlab::Testing::RequestBlockerMiddleware.num_active_requests == 0
end
def finished_all_js_requests?
@@ -53,12 +53,12 @@ module WaitForRequests
end
def finished_all_axios_requests?
- Capybara.page.evaluate_script('window.pendingRequests || 0').zero?
+ Capybara.page.evaluate_script('window.pendingRequests || 0').zero? # rubocop:disable Style/NumericPredicate
end
def finished_all_ajax_requests?
return true if Capybara.page.evaluate_script('typeof jQuery === "undefined"')
- Capybara.page.evaluate_script('jQuery.active').zero?
+ Capybara.page.evaluate_script('jQuery.active').zero? # rubocop:disable Style/NumericPredicate
end
end
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index cc0abfa0dd6..04482d3bfb8 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -44,7 +44,7 @@ module ExceedQueryLimitHelpers
def log_message
if expected.is_a?(ActiveRecord::QueryRecorder)
counts = count_queries(strip_marginalia_annotations(expected.log))
- extra_queries = strip_marginalia_annotations(@recorder.log).reject { |query| counts[query] -= 1 unless counts[query].zero? }
+ extra_queries = strip_marginalia_annotations(@recorder.log).reject { |query| counts[query] -= 1 unless counts[query] == 0 }
extra_queries_display = count_queries(extra_queries).map { |query, count| "[#{count}] #{query}" }
(['Extra queries:'] + extra_queries_display).join("\n\n")
@@ -188,7 +188,7 @@ RSpec::Matchers.define :issue_same_number_of_queries_as do
def expected_count_message
or_fewer_msg = "or fewer" if @or_fewer
- threshold_msg = "(+/- #{threshold})" unless threshold.zero?
+ threshold_msg = "(+/- #{threshold})" unless threshold == 0
["#{expected_count}", or_fewer_msg, threshold_msg].compact.join(' ')
end
diff --git a/spec/support/migrations_helpers/track_untracked_uploads_helpers.rb b/spec/support/migrations_helpers/track_untracked_uploads_helpers.rb
deleted file mode 100644
index 656be3b6d4d..00000000000
--- a/spec/support/migrations_helpers/track_untracked_uploads_helpers.rb
+++ /dev/null
@@ -1,130 +0,0 @@
-# frozen_string_literal: true
-
-module MigrationsHelpers
- module TrackUntrackedUploadsHelpers
- PUBLIC_DIR = File.join(Rails.root, 'tmp', 'tests', 'public')
- UPLOADS_DIR = File.join(PUBLIC_DIR, 'uploads')
- SYSTEM_DIR = File.join(UPLOADS_DIR, '-', 'system')
- UPLOAD_FILENAME = 'image.png'.freeze
- FIXTURE_FILE_PATH = File.join(Rails.root, 'spec', 'fixtures', 'dk.png')
- FIXTURE_CHECKSUM = 'b804383982bb89b00e828e3f44c038cc991d3d1768009fc39ba8e2c081b9fb75'.freeze
-
- def create_or_update_appearance(logo: false, header_logo: false)
- appearance = appearances.first_or_create(title: 'foo', description: 'bar', logo: (UPLOAD_FILENAME if logo), header_logo: (UPLOAD_FILENAME if header_logo))
-
- add_upload(appearance, 'Appearance', 'logo', 'AttachmentUploader') if logo
- add_upload(appearance, 'Appearance', 'header_logo', 'AttachmentUploader') if header_logo
-
- appearance
- end
-
- def create_group(avatar: false)
- index = unique_index(:group)
- group = namespaces.create(name: "group#{index}", path: "group#{index}", avatar: (UPLOAD_FILENAME if avatar))
-
- add_upload(group, 'Group', 'avatar', 'AvatarUploader') if avatar
-
- group
- end
-
- def create_note(attachment: false)
- note = notes.create(attachment: (UPLOAD_FILENAME if attachment))
-
- add_upload(note, 'Note', 'attachment', 'AttachmentUploader') if attachment
-
- note
- end
-
- def create_project(avatar: false)
- group = create_group
- project = projects.create(namespace_id: group.id, path: "project#{unique_index(:project)}", avatar: (UPLOAD_FILENAME if avatar))
- routes.create(path: "#{group.path}/#{project.path}", source_id: project.id, source_type: 'Project') # so Project.find_by_full_path works
-
- add_upload(project, 'Project', 'avatar', 'AvatarUploader') if avatar
-
- project
- end
-
- def create_user(avatar: false)
- user = users.create(email: "foo#{unique_index(:user)}@bar.com", avatar: (UPLOAD_FILENAME if avatar), projects_limit: 100)
-
- add_upload(user, 'User', 'avatar', 'AvatarUploader') if avatar
-
- user
- end
-
- def unique_index(name = :unnamed)
- @unique_index ||= {}
- @unique_index[name] ||= 0
- @unique_index[name] += 1
- end
-
- def add_upload(model, model_type, attachment_type, uploader)
- file_path = upload_file_path(model, model_type, attachment_type)
- path_relative_to_public = file_path.sub("#{PUBLIC_DIR}/", '')
- create_file(file_path)
-
- uploads.create!(
- size: 1062,
- path: path_relative_to_public,
- model_id: model.id,
- model_type: model_type == 'Group' ? 'Namespace' : model_type,
- uploader: uploader,
- checksum: FIXTURE_CHECKSUM
- )
- end
-
- def add_markdown_attachment(project, hashed_storage: false)
- project_dir = hashed_storage ? hashed_project_uploads_dir(project) : legacy_project_uploads_dir(project)
- attachment_dir = File.join(project_dir, SecureRandom.hex)
- attachment_file_path = File.join(attachment_dir, UPLOAD_FILENAME)
- project_attachment_path_relative_to_project = attachment_file_path.sub("#{project_dir}/", '')
- create_file(attachment_file_path)
-
- uploads.create!(
- size: 1062,
- path: project_attachment_path_relative_to_project,
- model_id: project.id,
- model_type: 'Project',
- uploader: 'FileUploader',
- checksum: FIXTURE_CHECKSUM
- )
- end
-
- def legacy_project_uploads_dir(project)
- namespace = namespaces.find_by(id: project.namespace_id)
- File.join(UPLOADS_DIR, namespace.path, project.path)
- end
-
- def hashed_project_uploads_dir(project)
- File.join(UPLOADS_DIR, '@hashed', 'aa', 'aaaaaaaaaaaa')
- end
-
- def upload_file_path(model, model_type, attachment_type)
- dir = File.join(upload_dir(model_type.downcase, attachment_type.to_s), model.id.to_s)
- File.join(dir, UPLOAD_FILENAME)
- end
-
- def upload_dir(model_type, attachment_type)
- File.join(SYSTEM_DIR, model_type, attachment_type)
- end
-
- def create_file(path)
- File.delete(path) if File.exist?(path)
- FileUtils.mkdir_p(File.dirname(path))
- FileUtils.cp(FIXTURE_FILE_PATH, path)
- end
-
- def get_uploads(model, model_type)
- uploads.where(model_type: model_type, model_id: model.id)
- end
-
- def get_full_path(project)
- routes.find_by(source_id: project.id, source_type: 'Project').path
- end
-
- def ensure_temporary_tracking_table_exists
- Gitlab::BackgroundMigration::PrepareUntrackedUploads.new.send(:ensure_temporary_tracking_table_exists)
- end
- end
-end
diff --git a/spec/support/protected_branch_helpers.rb b/spec/support/protected_branch_helpers.rb
index ede16d1c1e2..b34b9ec4641 100644
--- a/spec/support/protected_branch_helpers.rb
+++ b/spec/support/protected_branch_helpers.rb
@@ -27,4 +27,9 @@ module ProtectedBranchHelpers
set_allowed_to('merge')
set_allowed_to('push')
end
+
+ def click_on_protect
+ click_on "Protect"
+ wait_for_requests
+ end
end
diff --git a/spec/support/shared_contexts/change_access_checks_shared_context.rb b/spec/support/shared_contexts/change_access_checks_shared_context.rb
index e1ab81b4e3d..4c55990c901 100644
--- a/spec/support/shared_contexts/change_access_checks_shared_context.rb
+++ b/spec/support/shared_contexts/change_access_checks_shared_context.rb
@@ -3,7 +3,7 @@
RSpec.shared_context 'change access checks context' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
- let(:user_access) { Gitlab::UserAccess.new(user, project: project) }
+ let(:user_access) { Gitlab::UserAccess.new(user, container: project) }
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:ref) { 'refs/heads/master' }
diff --git a/spec/support/shared_contexts/csv_response_shared_context.rb b/spec/support/shared_contexts/csv_response_shared_context.rb
new file mode 100644
index 00000000000..af79e393a91
--- /dev/null
+++ b/spec/support/shared_contexts/csv_response_shared_context.rb
@@ -0,0 +1,5 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'CSV response' do
+ let(:csv_response) { CSV.parse(response.body) }
+end
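Because spec/support/csv_response.rb (added above) auto-includes this context for controller specs, such a spec can assert on the parsed CSV directly; a minimal hypothetical example:

# Hypothetical controller spec; the action and expected header row are illustrative.
it 'exports issues as CSV' do
  get :index, params: { format: :csv }

  expect(csv_response.first).to eq(['Title', 'State'])
end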
diff --git a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
index 07b6b98222f..010c445d8df 100644
--- a/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
+++ b/spec/support/shared_contexts/finders/merge_requests_finder_shared_contexts.rb
@@ -28,6 +28,7 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
fork_project(project1, user)
end
end
+
let!(:project3) do
allow_gitaly_n_plus_1 do
fork_project(project1, user).tap do |project|
@@ -35,6 +36,7 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
end
end
end
+
let_it_be(:project4, reload: true) do
allow_gitaly_n_plus_1 { create(:project, :repository, group: subgroup) }
end
@@ -53,22 +55,26 @@ RSpec.shared_context 'MergeRequestsFinder multiple projects with merge requests
source_project: project2, target_project: project1,
target_branch: 'merged-target')
end
+
let!(:merge_request2) do
create(:merge_request, :conflict, assignees: [user], author: user,
source_project: project2, target_project: project1,
state: 'closed')
end
+
let!(:merge_request3) do
create(:merge_request, :simple, author: user, assignees: [user2],
source_project: project2, target_project: project2,
state: 'locked',
title: 'thing WIP thing')
end
+
let!(:merge_request4) do
create(:merge_request, :simple, author: user,
source_project: project3, target_project: project3,
title: 'WIP thing')
end
+
let_it_be(:merge_request5) do
create(:merge_request, :simple, author: user,
source_project: project4, target_project: project4,
diff --git a/spec/support/shared_contexts/lib/gitlab/git_access_shared_examples.rb b/spec/support/shared_contexts/lib/gitlab/git_access_shared_examples.rb
new file mode 100644
index 00000000000..837c1c37aa3
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/git_access_shared_examples.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'forbidden git access' do
+ let(:message) { /You can't/ }
+
+ it 'prevents access' do
+ expect { subject }.to raise_error(Gitlab::GitAccess::ForbiddenError, message)
+ end
+end
+
+RSpec.shared_examples 'not-found git access' do
+ let(:message) { /not found/ }
+
+ it 'prevents access' do
+ expect { subject }.to raise_error(Gitlab::GitAccess::NotFoundError, message)
+ end
+end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index d9a72f2b54a..e276a54224b 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -7,6 +7,7 @@ RSpec.shared_context 'project navbar structure' do
nav_sub_items: [
_('CI / CD'),
(_('Code Review') if Gitlab.ee?),
+ (_('Merge Request') if Gitlab.ee?),
_('Repository'),
_('Value Stream')
]
@@ -64,8 +65,10 @@ RSpec.shared_context 'project navbar structure' do
nav_sub_items: [
_('Metrics'),
_('Alerts'),
+ _('Incidents'),
_('Environments'),
_('Error Tracking'),
+ _('Product Analytics'),
_('Serverless'),
_('Logs'),
_('Kubernetes')
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 4b0c7afab6d..af46e5474b0 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -17,6 +17,7 @@ RSpec.shared_context 'GroupPolicy context' do
read_group_merge_requests
]
end
+
let(:read_group_permissions) { %i[read_label read_list read_milestone read_board] }
let(:reporter_permissions) { %i[admin_label read_container_image read_metrics_dashboard_annotation read_prometheus] }
let(:developer_permissions) { %i[admin_milestone create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation] }
@@ -26,6 +27,7 @@ RSpec.shared_context 'GroupPolicy context' do
read_cluster create_cluster update_cluster admin_cluster add_cluster
]
end
+
let(:owner_permissions) do
[
:admin_group,
@@ -38,6 +40,7 @@ RSpec.shared_context 'GroupPolicy context' do
:update_default_branch_protection
].compact
end
+
let(:admin_permissions) { %i[read_confidential_issues] }
before_all do
diff --git a/spec/support/shared_contexts/prometheus/alert_shared_context.rb b/spec/support/shared_contexts/prometheus/alert_shared_context.rb
index 330d2c4515f..932ab899270 100644
--- a/spec/support/shared_contexts/prometheus/alert_shared_context.rb
+++ b/spec/support/shared_contexts/prometheus/alert_shared_context.rb
@@ -15,7 +15,7 @@ RSpec.shared_context 'self-managed prometheus alert attributes' do
{
panel_groups: [{
panels: [{
- type: 'line-graph',
+ type: 'area-chart',
title: title,
y_label: y_label,
metrics: [{ query_range: query }]
diff --git a/spec/support/shared_contexts/read_ci_configuration_shared_context.rb b/spec/support/shared_contexts/read_ci_configuration_shared_context.rb
new file mode 100644
index 00000000000..f8f33e2a745
--- /dev/null
+++ b/spec/support/shared_contexts/read_ci_configuration_shared_context.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'read ci configuration for sast enabled project' do
+ let_it_be(:gitlab_ci_yml_content) do
+ File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci_for_sast.yml'))
+ end
+
+ let_it_be(:project) { create(:project, :repository) }
+end
diff --git a/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb b/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb
index 7f150bed43d..edc5b313220 100644
--- a/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb
+++ b/spec/support/shared_contexts/requests/api/graphql/jira_import/jira_projects_context.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_context 'jira projects request context' do
+RSpec.shared_context 'Jira projects request context' do
let(:url) { 'https://jira.example.com' }
let(:username) { 'jira-username' }
let(:password) { 'jira-password' }
diff --git a/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb b/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb
new file mode 100644
index 00000000000..bcc98cf6416
--- /dev/null
+++ b/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'container repository delete tags service shared context' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :private) }
+ let_it_be(:repository) { create(:container_repository, :root, project: project) }
+
+ let(:params) { { tags: tags } }
+
+ before do
+ stub_container_registry_config(enabled: true,
+ api_url: 'http://registry.gitlab',
+ host_port: 'registry.gitlab')
+
+ stub_container_registry_tags(
+ repository: repository.path,
+ tags: %w(latest A Ba Bb C D E))
+ end
+
+ def stub_delete_reference_request(tag, status = 200)
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/#{tag}")
+ .to_return(status: status, body: '')
+ end
+
+ def stub_delete_reference_requests(tags)
+ tags = Hash[Array.wrap(tags).map { |tag| [tag, 200] }] unless tags.is_a?(Hash)
+
+ tags.each do |tag, status|
+ stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/#{tag}")
+ .to_return(status: status, body: '')
+ end
+ end
+
+ def stub_put_manifest_request(tag, status = 200, headers = { 'docker-content-digest' => 'sha256:dummy' })
+ stub_request(:put, "http://registry.gitlab/v2/#{repository.path}/manifests/#{tag}")
+ .to_return(status: status, body: '', headers: headers)
+ end
+
+ def stub_tag_digest(tag, digest)
+ stub_request(:head, "http://registry.gitlab/v2/#{repository.path}/manifests/#{tag}")
+ .to_return(status: 200, body: '', headers: { 'docker-content-digest' => digest })
+ end
+
+ def stub_digest_config(digest, created_at)
+ allow_any_instance_of(ContainerRegistry::Client)
+ .to receive(:blob)
+ .with(repository.path, digest, nil) do
+ { 'created' => created_at.to_datetime.rfc3339 }.to_json if created_at
+ end
+ end
+
+ def stub_upload(digest, success: true)
+ content = "{\n \"config\": {\n }\n}"
+ expect_any_instance_of(ContainerRegistry::Client)
+ .to receive(:upload_blob)
+ .with(repository.path, content, digest) { double(success?: success ) }
+ end
+
+ def expect_delete_tag_by_digest(digest)
+ expect_any_instance_of(ContainerRegistry::Client)
+ .to receive(:delete_repository_tag_by_digest)
+ .with(repository.path, digest) { true }
+
+ expect_any_instance_of(ContainerRegistry::Client)
+ .not_to receive(:delete_repository_tag_by_name)
+ end
+
+ def expect_delete_tag_by_names(names)
+ Array.wrap(names).each do |name|
+ expect_any_instance_of(ContainerRegistry::Client)
+ .to receive(:delete_repository_tag_by_name)
+ .with(repository.path, name) { true }
+
+ expect_any_instance_of(ContainerRegistry::Client)
+ .not_to receive(:delete_repository_tag_by_digest)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/alert_notification_service_shared_examples.rb b/spec/support/shared_examples/alert_notification_service_shared_examples.rb
new file mode 100644
index 00000000000..1568e4357a1
--- /dev/null
+++ b/spec/support/shared_examples/alert_notification_service_shared_examples.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
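+# Both examples expect the including spec to define `subject` as the service response under test.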
+RSpec.shared_examples 'Alert Notification Service sends notification email' do
+ let(:notification_service) { spy }
+
+ it 'sends a notification for firing alerts only' do
+ expect(NotificationService)
+ .to receive(:new)
+ .and_return(notification_service)
+
+ expect(notification_service)
+ .to receive_message_chain(:async, :prometheus_alerts_fired)
+
+ expect(subject).to be_success
+ end
+end
+
+RSpec.shared_examples 'Alert Notification Service sends no notifications' do |http_status:|
+ let(:notification_service) { spy }
+ let(:create_events_service) { spy }
+
+ it 'does not notify' do
+ expect(notification_service).not_to receive(:async)
+ expect(create_events_service).not_to receive(:execute)
+
+ expect(subject).to be_error
+ expect(subject.http_status).to eq(http_status)
+ end
+end
diff --git a/spec/support/shared_examples/controllers/binary_blob_shared_examples.rb b/spec/support/shared_examples/controllers/binary_blob_shared_examples.rb
index c1ec515f1fe..acce7642cfe 100644
--- a/spec/support/shared_examples/controllers/binary_blob_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/binary_blob_shared_examples.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'editing snippet checks blob is binary' do
+ let(:snippets_binary_blob_value) { true }
+
before do
sign_in(user)
@@ -8,6 +10,8 @@ RSpec.shared_examples 'editing snippet checks blob is binary' do
allow(blob).to receive(:binary?).and_return(binary)
end
+ stub_feature_flags(snippets_binary_blob: snippets_binary_blob_value)
+
subject
end
@@ -23,13 +27,24 @@ RSpec.shared_examples 'editing snippet checks blob is binary' do
context 'when blob is binary' do
let(:binary) { true }
- it 'redirects away' do
- expect(response).to redirect_to(gitlab_snippet_path(snippet))
+ it 'responds with status 200' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:edit)
+ end
+
+ context 'when feature flag :snippets_binary_blob is disabled' do
+ let(:snippets_binary_blob_value) { false }
+
+ it 'redirects away' do
+ expect(response).to redirect_to(gitlab_snippet_path(snippet))
+ end
end
end
end
RSpec.shared_examples 'updating snippet checks blob is binary' do
+ let(:snippets_binary_blob_value) { true }
+
before do
sign_in(user)
@@ -37,6 +52,8 @@ RSpec.shared_examples 'updating snippet checks blob is binary' do
allow(blob).to receive(:binary?).and_return(binary)
end
+ stub_feature_flags(snippets_binary_blob: snippets_binary_blob_value)
+
subject
end
@@ -52,9 +69,18 @@ RSpec.shared_examples 'updating snippet checks blob is binary' do
context 'when blob is binary' do
let(:binary) { true }
- it 'redirects away without updating' do
+ it 'updates successfully' do
+ expect(snippet.reload.title).to eq title
expect(response).to redirect_to(gitlab_snippet_path(snippet))
- expect(snippet.reload.title).not_to eq title
+ end
+
+ context 'when feature flag :snippets_binary_blob is disabled' do
+ let(:snippets_binary_blob_value) { false }
+
+ it 'redirects away without updating' do
+ expect(response).to redirect_to(gitlab_snippet_path(snippet))
+ expect(snippet.reload.title).not_to eq title
+ end
end
end
end
diff --git a/spec/support/shared_examples/controllers/concerns/graceful_timeout_handling_shared_examples.rb b/spec/support/shared_examples/controllers/concerns/graceful_timeout_handling_shared_examples.rb
new file mode 100644
index 00000000000..ea002776eeb
--- /dev/null
+++ b/spec/support/shared_examples/controllers/concerns/graceful_timeout_handling_shared_examples.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples GracefulTimeoutHandling do
+ it 'includes GracefulTimeoutHandling' do
+ expect(controller).to be_a(GracefulTimeoutHandling)
+ end
+end
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index a01fa49d701..8bc91f72b8c 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -72,7 +72,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
project = create(:project, import_type: provider, namespace: user.namespace, import_status: :finished, import_source: 'example/repo')
group = create(:group)
group.add_owner(user)
- stub_client(repos: [repo, org_repo], orgs: [org], org_repos: [org_repo])
+ stub_client(repos: [repo, org_repo], orgs: [org], org_repos: [org_repo], each_page: [OpenStruct.new(objects: [repo, org_repo])].to_enum)
get :status, format: :json
@@ -85,7 +85,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
it "does not show already added project" do
project = create(:project, import_type: provider, namespace: user.namespace, import_status: :finished, import_source: 'asd/vim')
- stub_client(repos: [repo], orgs: [])
+ stub_client(repos: [repo], orgs: [], each_page: [OpenStruct.new(objects: [repo])].to_enum)
get :status, format: :json
@@ -94,7 +94,8 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
end
it "touches the etag cache store" do
- expect(stub_client(repos: [], orgs: [])).to receive(:repos)
+ stub_client(repos: [], orgs: [], each_page: [])
+
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
expect(store).to receive(:touch) { "realtime_changes_import_#{provider}_path" }
end
@@ -102,17 +103,11 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
get :status, format: :json
end
- it "requests provider repos list" do
- expect(stub_client(repos: [], orgs: [])).to receive(:repos)
-
- get :status
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
it "handles an invalid access token" do
- allow_any_instance_of(Gitlab::LegacyGithubImport::Client)
- .to receive(:repos).and_raise(Octokit::Unauthorized)
+ client = stub_client(repos: [], orgs: [], each_page: [])
+
+ allow(client).to receive(:repos).and_raise(Octokit::Unauthorized)
+ allow(client).to receive(:each_page).and_raise(Octokit::Unauthorized)
get :status
@@ -122,7 +117,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
end
it "does not produce N+1 database queries" do
- stub_client(repos: [repo], orgs: [])
+ stub_client(repos: [repo], orgs: [], each_page: [].to_enum)
group_a = create(:group)
group_a.add_owner(user)
create(:project, :import_started, import_type: provider, namespace: user.namespace)
@@ -144,10 +139,12 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
let(:repo_2) { OpenStruct.new(login: 'emacs', full_name: 'asd/emacs', name: 'emacs', owner: { login: 'owner' }) }
let(:project) { create(:project, import_type: provider, namespace: user.namespace, import_status: :finished, import_source: 'example/repo') }
let(:group) { create(:group) }
+ let(:repos) { [repo, repo_2, org_repo] }
before do
group.add_owner(user)
- stub_client(repos: [repo, repo_2, org_repo], orgs: [org], org_repos: [org_repo])
+ client = stub_client(repos: repos, orgs: [org], org_repos: [org_repo])
+ allow(client).to receive(:each_page).and_return([OpenStruct.new(objects: repos)].to_enum)
end
it 'filters list of repositories by name' do
@@ -187,14 +184,14 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
end
before do
- stub_client(user: provider_user, repo: provider_repo)
+ stub_client(user: provider_user, repo: provider_repo, repository: provider_repo)
assign_session_token(provider)
end
it 'returns 200 response when the project is imported successfully' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
@@ -208,7 +205,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
@@ -219,7 +216,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it "touches the etag cache store" do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
expect(store).to receive(:touch) { "realtime_changes_import_#{provider}_path" }
end
@@ -232,7 +229,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the current user's namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
end
@@ -244,7 +241,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the current user's namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
end
@@ -271,7 +268,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the existing namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, existing_namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
end
@@ -283,7 +280,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
end
@@ -302,7 +299,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the new namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, an_instance_of(Group), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { target_namespace: provider_repo.name }, format: :json
end
@@ -323,7 +320,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it "takes the current user's namespace" do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, provider_repo.name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, format: :json
end
@@ -341,7 +338,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, test_namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { target_namespace: test_namespace.name, new_name: test_name }, format: :json
end
@@ -349,7 +346,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected name and default namespace' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { new_name: test_name }, format: :json
end
@@ -368,7 +365,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, nested_namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { target_namespace: nested_namespace.full_path, new_name: test_name }, format: :json
end
@@ -380,7 +377,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { target_namespace: 'foo/bar', new_name: test_name }, format: :json
end
@@ -388,7 +385,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'creates the namespaces' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
expect { post :create, params: { target_namespace: 'foo/bar', new_name: test_name }, format: :json }
.to change { Namespace.count }.by(2)
@@ -397,7 +394,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'new namespace has the right parent' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { target_namespace: 'foo/bar', new_name: test_name }, format: :json
@@ -416,7 +413,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'takes the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :json
end
@@ -424,7 +421,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'creates the namespaces' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
.to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .and_return(double(execute: project))
expect { post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :json }
.to change { Namespace.count }.by(2)
@@ -432,11 +429,11 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'does not create a new namespace under the user namespace' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
+ .and_return(double(execute: project))
expect { post :create, params: { target_namespace: "#{user.namespace_path}/test_group", new_name: test_name }, format: :js }
- .not_to change { Namespace.count }
+ .not_to change { Namespace.count }
end
end
@@ -446,19 +443,19 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
it 'does not take the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .to receive(:new).with(provider_repo, test_name, user.namespace, user, access_params, type: provider)
+ .and_return(double(execute: project))
post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :js
end
it 'does not create the namespaces' do
allow(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
- .and_return(double(execute: project))
+ .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, access_params, type: provider)
+ .and_return(double(execute: project))
expect { post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :js }
- .not_to change { Namespace.count }
+ .not_to change { Namespace.count }
end
end
@@ -471,8 +468,8 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
user.update!(can_create_group: false)
expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, group, user, access_params, type: provider)
- .and_return(double(execute: project))
+ .to receive(:new).with(provider_repo, test_name, group, user, access_params, type: provider)
+ .and_return(double(execute: project))
post :create, params: { target_namespace: 'foo', new_name: test_name }, format: :js
end
diff --git a/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb b/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb
index 94cd6971f7c..19b1cee44ee 100644
--- a/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/metrics/dashboard/prometheus_api_proxy_shared_examples.rb
@@ -9,6 +9,7 @@ RSpec.shared_examples_for 'metrics dashboard prometheus api proxy' do
id: proxyable.id.to_s
}
end
+
let(:expected_params) do
ActionController::Parameters.new(
prometheus_proxy_params(
diff --git a/spec/support/shared_examples/controllers/variables_shared_examples.rb b/spec/support/shared_examples/controllers/variables_shared_examples.rb
index 9ff0bc3d217..34632993cf0 100644
--- a/spec/support/shared_examples/controllers/variables_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/variables_shared_examples.rb
@@ -21,6 +21,7 @@ RSpec.shared_examples 'PATCH #update updates variables' do
secret_value: variable.value,
protected: variable.protected?.to_s }
end
+
let(:new_variable_attributes) do
{ key: 'new_key',
secret_value: 'dummy_value',
diff --git a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
index 4df3139d56e..c89ee0d25ae 100644
--- a/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/wiki_actions_shared_examples.rb
@@ -61,6 +61,14 @@ RSpec.shared_examples 'wiki controller actions' do
expect(assigns(:sidebar_wiki_entries)).to be_nil
expect(assigns(:sidebar_limited)).to be_nil
end
+
+ context 'when the request format is not HTML' do
+ it 'returns a 404 error' do
+ get :pages, params: routing_params.merge(format: 'json')
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
describe 'GET #history' do
@@ -153,6 +161,14 @@ RSpec.shared_examples 'wiki controller actions' do
expect(assigns(:sidebar_limited)).to be(false)
end
+ it 'increases the page view counter' do
+ expect do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end.to change { Gitlab::UsageDataCounters::WikiPageCounter.read(:view) }.by(1)
+ end
+
context 'when page content encoding is invalid' do
it 'sets flash error' do
allow(controller).to receive(:valid_encoding?).and_return(false)
@@ -339,6 +355,44 @@ RSpec.shared_examples 'wiki controller actions' do
end
end
+ describe 'POST #create' do
+ let(:new_title) { 'New title' }
+ let(:new_content) { 'New content' }
+
+ subject do
+ post(:create,
+ params: routing_params.merge(
+ wiki: { title: new_title, content: new_content }
+ ))
+ end
+
+ context 'when page is valid' do
+ it 'creates the page' do
+ expect do
+ subject
+ end.to change { wiki.list_pages.size }.by 1
+
+ wiki_page = wiki.find_page(new_title)
+
+ expect(wiki_page.title).to eq new_title
+ expect(wiki_page.content).to eq new_content
+ end
+ end
+
+ context 'when page is not valid' do
+ let(:new_title) { '' }
+
+ it 'renders the edit state' do
+ expect do
+ subject
+ end.not_to change { wiki.list_pages.size }
+
+ expect(response).to render_template('shared/wikis/edit')
+ expect(flash[:alert]).to eq('Could not create wiki page')
+ end
+ end
+ end
+
def redirect_to_wiki(wiki, page)
redirect_to(controller.wiki_page_path(wiki, page))
end
diff --git a/spec/support/shared_examples/create_alert_issue_shared_examples.rb b/spec/support/shared_examples/create_alert_issue_shared_examples.rb
deleted file mode 100644
index 9f4e1c4335a..00000000000
--- a/spec/support/shared_examples/create_alert_issue_shared_examples.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'create alert issue sets issue labels' do
- let(:title) { IncidentManagement::CreateIncidentLabelService::LABEL_PROPERTIES[:title] }
- let!(:label) { create(:label, project: project, title: title) }
- let(:label_service) { instance_double(IncidentManagement::CreateIncidentLabelService, execute: label_service_response) }
-
- before do
- allow(IncidentManagement::CreateIncidentLabelService).to receive(:new).with(project, user).and_return(label_service)
- end
-
- context 'when create incident label responds with success' do
- let(:label_service_response) { ServiceResponse.success(payload: { label: label }) }
-
- it 'adds label to issue' do
- expect(issue.labels).to eq([label])
- end
- end
-
- context 'when create incident label responds with error' do
- let(:label_service_response) { ServiceResponse.error(payload: { label: label }, message: 'label error') }
-
- it 'creates an issue without labels' do
- expect(issue.labels).to be_empty
- end
- end
-end
diff --git a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
index 00ce690d2e3..ffe4fb83283 100644
--- a/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
+++ b/spec/support/shared_examples/features/master_manages_access_requests_shared_example.rb
@@ -8,17 +8,18 @@ RSpec.shared_examples 'Maintainer manages access requests' do
entity.request_access(user)
entity.respond_to?(:add_owner) ? entity.add_owner(maintainer) : entity.add_maintainer(maintainer)
sign_in(maintainer)
- end
-
- it 'maintainer can see access requests' do
visit members_page_path
+ if has_tabs
+ click_on 'Access requests'
+ end
+ end
+
+ it 'maintainer can see access requests', :js do
expect_visible_access_request(entity, user)
end
it 'maintainer can grant access', :js do
- visit members_page_path
-
expect_visible_access_request(entity, user)
click_on 'Grant access'
@@ -31,8 +32,6 @@ RSpec.shared_examples 'Maintainer manages access requests' do
end
it 'maintainer can deny access', :js do
- visit members_page_path
-
expect_visible_access_request(entity, user)
# Open modal
@@ -47,7 +46,13 @@ RSpec.shared_examples 'Maintainer manages access requests' do
end
def expect_visible_access_request(entity, user)
- expect(page).to have_content "Users requesting access to #{entity.name} 1"
+ if has_tabs
+ expect(page).to have_content "Access requests 1"
+ expect(page).to have_content "Users requesting access to #{entity.name}"
+ else
+ expect(page).to have_content "Users requesting access to #{entity.name} 1"
+ end
+
expect(page).to have_content user.name
end
diff --git a/spec/support/shared_examples/features/packages_shared_examples.rb b/spec/support/shared_examples/features/packages_shared_examples.rb
new file mode 100644
index 00000000000..6debbf81fc0
--- /dev/null
+++ b/spec/support/shared_examples/features/packages_shared_examples.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
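+# Expects the including spec to define `packages`; pass check_project_name: true to also assert the owning project name per row.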
+RSpec.shared_examples 'packages list' do |check_project_name: false|
+ it 'shows a list of packages' do
+ wait_for_requests
+
+ packages.each_with_index do |pkg, index|
+ package_row = package_table_row(index)
+
+ expect(package_row).to have_content(pkg.name)
+ expect(package_row).to have_content(pkg.version)
+ expect(package_row).to have_content(pkg.project.name) if check_project_name
+ end
+ end
+
+ def package_table_row(index)
+ page.all("#{packages_table_selector} > [data-qa-selector=\"packages-row\"]")[index].text
+ end
+end
+
+RSpec.shared_examples 'package details link' do |property|
+ let(:package) { packages.first }
+
+ before do
+ stub_feature_flags(packages_details_one_column: false)
+ end
+
+ it 'navigates to the correct url' do
+ page.within(packages_table_selector) do
+ click_link package.name
+ end
+
+ expect(page).to have_current_path(project_package_path(package.project, package))
+
+ page.within('.detail-page-header') do
+ expect(page).to have_content(package.name)
+ end
+
+ page.within('[data-qa-selector="package_information_content"]') do
+ expect(page).to have_content('Installation')
+ expect(page).to have_content('Registry setup')
+ end
+ end
+end
+
+RSpec.shared_examples 'when there are no packages' do
+ it 'displays the empty message' do
+ expect(page).to have_content('There are no packages yet')
+ end
+end
+
+RSpec.shared_examples 'correctly sorted packages list' do |order_by, ascending: false|
+ context "ordered by #{order_by} and ascending #{ascending}" do
+ before do
+ click_sort_option(order_by, ascending)
+ end
+
+ it_behaves_like 'packages list'
+ end
+end
+
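+# Relies on the including spec to define package_one and package_two; the expected order for each column comes from those fixtures.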
+RSpec.shared_examples 'shared package sorting' do
+ it_behaves_like 'correctly sorted packages list', 'Type' do
+ let(:packages) { [package_two, package_one] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Type', ascending: true do
+ let(:packages) { [package_one, package_two] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Name' do
+ let(:packages) { [package_two, package_one] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Name', ascending: true do
+ let(:packages) { [package_one, package_two] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Version' do
+ let(:packages) { [package_one, package_two] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Version', ascending: true do
+ let(:packages) { [package_two, package_one] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Created' do
+ let(:packages) { [package_two, package_one] }
+ end
+
+ it_behaves_like 'correctly sorted packages list', 'Created', ascending: true do
+ let(:packages) { [package_one, package_two] }
+ end
+end
+
+def packages_table_selector
+ '[data-qa-selector="packages-table"]'
+end
+
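+# Resets the list to descending order, picks the given sort option, then flips the direction when ascending is requested.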
+def click_sort_option(option, ascending)
+ page.within('.gl-sorting') do
+ # Reset the sort direction
+ click_button 'Sort direction' if page.has_selector?('svg[aria-label="Sorting Direction: Ascending"]', wait: 0)
+
+ find('button.dropdown-menu-toggle').click
+
+ page.within('.dropdown-menu') do
+ click_button option
+ end
+
+ click_button 'Sort direction' if ascending
+ end
+end
diff --git a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
index 65db082505a..a46382bc292 100644
--- a/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
+++ b/spec/support/shared_examples/features/protected_branches_access_control_ce_shared_examples.rb
@@ -22,7 +22,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
end
end
- click_on "Protect"
+ click_on_protect
expect(ProtectedBranch.count).to eq(1)
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to eq([access_type_id])
@@ -45,7 +45,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
find(:link, 'No one').click
end
- click_on "Protect"
+ click_on_protect
expect(ProtectedBranch.count).to eq(1)
@@ -85,7 +85,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
find(:link, 'No one').click
end
- click_on "Protect"
+ click_on_protect
expect(ProtectedBranch.count).to eq(1)
expect(ProtectedBranch.last.merge_access_levels.map(&:access_level)).to eq([access_type_id])
@@ -108,7 +108,7 @@ RSpec.shared_examples "protected branches > access control > CE" do
find(:link, 'No one').click
end
- click_on "Protect"
+ click_on_protect
expect(ProtectedBranch.count).to eq(1)
diff --git a/spec/support/shared_examples/features/rss_shared_examples.rb b/spec/support/shared_examples/features/rss_shared_examples.rb
index 42df88ec08e..1b0d3f9605a 100644
--- a/spec/support/shared_examples/features/rss_shared_examples.rb
+++ b/spec/support/shared_examples/features/rss_shared_examples.rb
@@ -9,8 +9,7 @@ end
RSpec.shared_examples "it has an RSS button with current_user's feed token" do
it "shows the RSS button with current_user's feed token" do
expect(page)
- .to have_css("a:has(.fa-rss)[href*='feed_token=#{user.feed_token}']")
- .or have_css("a.js-rss-button[href*='feed_token=#{user.feed_token}']")
+ .to have_css("a:has(.qa-rss-icon)[href*='feed_token=#{user.feed_token}']")
end
end
@@ -23,7 +22,6 @@ end
RSpec.shared_examples "it has an RSS button without a feed token" do
it "shows the RSS button without a feed token" do
expect(page)
- .to have_css("a:has(.fa-rss):not([href*='feed_token'])")
- .or have_css("a.js-rss-button:not([href*='feed_token'])")
+ .to have_css("a:has(.qa-rss-icon):not([href*='feed_token'])")
end
end
diff --git a/spec/support/shared_examples/features/snippets_shared_examples.rb b/spec/support/shared_examples/features/snippets_shared_examples.rb
index 1c8a9714bdf..8d68b1e4c0a 100644
--- a/spec/support/shared_examples/features/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/features/snippets_shared_examples.rb
@@ -50,3 +50,225 @@ RSpec.shared_examples 'tabs with counts' do
expect(tab.find('.badge').text).to eq(counts[:public])
end
end
+
+RSpec.shared_examples 'does not show New Snippet button' do
+ let(:user) { create(:user, :external) }
+
+ specify do
+ sign_in(user)
+
+ subject
+
+ wait_for_requests
+
+ expect(page).not_to have_link('New snippet')
+ end
+end
+
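+# Expects the including spec to define `snippet` and a `subject` that visits the page; each context below swaps the blob via `file_path`.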
+RSpec.shared_examples 'show and render proper snippet blob' do
+ before do
+ allow_any_instance_of(Snippet).to receive(:blobs).and_return([snippet.repository.blob_at('master', file_path)])
+ end
+
+ context 'Ruby file' do
+ let(:file_path) { 'files/ruby/popen.rb' }
+
+ it 'displays the blob' do
+ subject
+
+ aggregate_failures do
+ # shows highlighted Ruby code
+ expect(page).to have_content("require 'fileutils'")
+
+ # does not show a viewer switcher
+ expect(page).not_to have_selector('.js-blob-viewer-switcher')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+ end
+ end
+ end
+
+ context 'Markdown file' do
+ let(:file_path) { 'files/markdown/ruby-style-guide.md' }
+
+ context 'visiting directly' do
+ before do
+ subject
+ end
+
+ it 'displays the blob using the rich viewer' do
+ aggregate_failures do
+ # hides the simple viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
+
+ # shows rendered Markdown
+ expect(page).to have_link("PEP-8")
+
+ # shows a viewer switcher
+ expect(page).to have_selector('.js-blob-viewer-switcher')
+
+ # shows a disabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn.disabled')
+
+ # shows a raw button
+ expect(page).to have_link('Open raw')
+
+ # shows a download button
+ expect(page).to have_link('Download')
+ end
+ end
+
+ context 'switching to the simple viewer' do
+ before do
+ find('.js-blob-viewer-switch-btn[data-viewer=simple]').click
+
+ wait_for_requests
+ end
+
+ it 'displays the blob using the simple viewer' do
+ aggregate_failures do
+ # hides the rich viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
+
+ # shows highlighted Markdown code
+ expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ end
+ end
+
+ context 'switching to the rich viewer again' do
+ before do
+ find('.js-blob-viewer-switch-btn[data-viewer=rich]').click
+
+ wait_for_requests
+ end
+
+ it 'displays the blob using the rich viewer' do
+ aggregate_failures do
+ # hides the simple viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]', visible: false)
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]')
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ end
+ end
+ end
+ end
+ end
+
+ context 'visiting with a line number anchor' do
+ let(:anchor) { 'L1' }
+
+ it 'displays the blob using the simple viewer' do
+ subject
+
+ aggregate_failures do
+ # hides the rich viewer
+ expect(page).to have_selector('.blob-viewer[data-type="simple"]')
+ expect(page).to have_selector('.blob-viewer[data-type="rich"]', visible: false)
+
+ # highlights the line in question
+ expect(page).to have_selector('#LC1.hll')
+
+ # shows highlighted Markdown code
+ expect(page).to have_content("[PEP-8](http://www.python.org/dev/peps/pep-0008/)")
+
+ # shows an enabled copy button
+ expect(page).to have_selector('.js-copy-blob-source-btn:not(.disabled)')
+ end
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'personal snippet with references' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:commit) { project.commit }
+
+ let(:mr_reference) { merge_request.to_reference(full: true) }
+ let(:issue_reference) { issue.to_reference(full: true) }
+ let(:snippet_reference) { project_snippet.to_reference(full: true) }
+ let(:commit_reference) { commit.reference_link_text(full: true) }
+
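+ # Shared assertions reused below, once for raw references and once for full URLs.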
+ RSpec.shared_examples 'handles resource links' do
+ context 'with access to the resource' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'converts the reference to a link' do
+ subject
+
+ page.within(container) do
+ aggregate_failures do
+ expect(page).to have_link(mr_reference)
+ expect(page).to have_link(issue_reference)
+ expect(page).to have_link(snippet_reference)
+ expect(page).to have_link(commit_reference)
+ end
+ end
+ end
+ end
+
+ context 'without access to the resource' do
+ it 'does not convert the reference to a link' do
+ subject
+
+ page.within(container) do
+ expect(page).not_to have_link(mr_reference)
+ expect(page).not_to have_link(issue_reference)
+ expect(page).not_to have_link(snippet_reference)
+ expect(page).not_to have_link(commit_reference)
+ end
+ end
+ end
+ end
+
+ context 'when using references to resources' do
+ let(:references) do
+ <<~REFERENCES
+ MR: #{mr_reference}
+
+ Commit: #{commit_reference}
+
+ Issue: #{issue_reference}
+
+ ProjectSnippet: #{snippet_reference}
+ REFERENCES
+ end
+
+ it_behaves_like 'handles resource links'
+ end
+
+ context 'when using links to resources' do
+ let(:args) { { host: Gitlab.config.gitlab.url, port: nil } }
+ let(:references) do
+ <<~REFERENCES
+ MR: #{merge_request_url(merge_request, args)}
+
+ Commit: #{project_commit_url(project, commit, args)}
+
+ Issue: #{issue_url(issue, args)}
+
+ ProjectSnippet: #{project_snippet_url(project, project_snippet, args)}
+ REFERENCES
+ end
+
+ it_behaves_like 'handles resource links'
+ end
+end
diff --git a/spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb b/spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb
index c802038c9da..a2c34cdd4a1 100644
--- a/spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb
+++ b/spec/support/shared_examples/finders/snippet_visibility_shared_examples.rb
@@ -9,13 +9,28 @@ RSpec.shared_examples 'snippet visibility' do
let_it_be(:non_member) { create(:user) }
let_it_be(:project, reload: true) do
- create(:project).tap do |project|
+ create(:project, :public).tap do |project|
project.add_developer(author)
project.add_developer(member)
end
end
+ let(:snippets) do
+ {
+ private: private_snippet,
+ public: public_snippet,
+ internal: internal_snippet
+ }
+ end
+
+ let(:user) { users[user_type] }
+ let(:snippet) { snippets[snippet_visibility] }
+
context "For project snippets" do
+ let_it_be(:private_snippet) { create(:project_snippet, :private, project: project, author: author) }
+ let_it_be(:public_snippet) { create(:project_snippet, :public, project: project, author: author) }
+ let_it_be(:internal_snippet) { create(:project_snippet, :internal, project: project, author: author) }
+
let!(:users) do
{
unauthenticated: nil,
@@ -26,214 +41,212 @@ RSpec.shared_examples 'snippet visibility' do
}
end
- where(:project_type, :feature_visibility, :user_type, :snippet_type, :outcome) do
+ where(:project_visibility, :feature_visibility, :user_type, :snippet_visibility, :outcome) do
[
# Public projects
- [:public, ProjectFeature::ENABLED, :unauthenticated, Snippet::PUBLIC, true],
- [:public, ProjectFeature::ENABLED, :unauthenticated, Snippet::INTERNAL, false],
- [:public, ProjectFeature::ENABLED, :unauthenticated, Snippet::PRIVATE, false],
+ [:public, :enabled, :unauthenticated, :public, true],
+ [:public, :enabled, :unauthenticated, :internal, false],
+ [:public, :enabled, :unauthenticated, :private, false],
- [:public, ProjectFeature::ENABLED, :external, Snippet::PUBLIC, true],
- [:public, ProjectFeature::ENABLED, :external, Snippet::INTERNAL, false],
- [:public, ProjectFeature::ENABLED, :external, Snippet::PRIVATE, false],
+ [:public, :enabled, :external, :public, true],
+ [:public, :enabled, :external, :internal, false],
+ [:public, :enabled, :external, :private, false],
- [:public, ProjectFeature::ENABLED, :non_member, Snippet::PUBLIC, true],
- [:public, ProjectFeature::ENABLED, :non_member, Snippet::INTERNAL, true],
- [:public, ProjectFeature::ENABLED, :non_member, Snippet::PRIVATE, false],
+ [:public, :enabled, :non_member, :public, true],
+ [:public, :enabled, :non_member, :internal, true],
+ [:public, :enabled, :non_member, :private, false],
- [:public, ProjectFeature::ENABLED, :member, Snippet::PUBLIC, true],
- [:public, ProjectFeature::ENABLED, :member, Snippet::INTERNAL, true],
- [:public, ProjectFeature::ENABLED, :member, Snippet::PRIVATE, true],
+ [:public, :enabled, :member, :public, true],
+ [:public, :enabled, :member, :internal, true],
+ [:public, :enabled, :member, :private, true],
- [:public, ProjectFeature::ENABLED, :author, Snippet::PUBLIC, true],
- [:public, ProjectFeature::ENABLED, :author, Snippet::INTERNAL, true],
- [:public, ProjectFeature::ENABLED, :author, Snippet::PRIVATE, true],
+ [:public, :enabled, :author, :public, true],
+ [:public, :enabled, :author, :internal, true],
+ [:public, :enabled, :author, :private, true],
- [:public, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PUBLIC, false],
- [:public, ProjectFeature::PRIVATE, :unauthenticated, Snippet::INTERNAL, false],
- [:public, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PRIVATE, false],
+ [:public, :private, :unauthenticated, :public, false],
+ [:public, :private, :unauthenticated, :internal, false],
+ [:public, :private, :unauthenticated, :private, false],
- [:public, ProjectFeature::PRIVATE, :external, Snippet::PUBLIC, false],
- [:public, ProjectFeature::PRIVATE, :external, Snippet::INTERNAL, false],
- [:public, ProjectFeature::PRIVATE, :external, Snippet::PRIVATE, false],
+ [:public, :private, :external, :public, false],
+ [:public, :private, :external, :internal, false],
+ [:public, :private, :external, :private, false],
- [:public, ProjectFeature::PRIVATE, :non_member, Snippet::PUBLIC, false],
- [:public, ProjectFeature::PRIVATE, :non_member, Snippet::INTERNAL, false],
- [:public, ProjectFeature::PRIVATE, :non_member, Snippet::PRIVATE, false],
+ [:public, :private, :non_member, :public, false],
+ [:public, :private, :non_member, :internal, false],
+ [:public, :private, :non_member, :private, false],
- [:public, ProjectFeature::PRIVATE, :member, Snippet::PUBLIC, true],
- [:public, ProjectFeature::PRIVATE, :member, Snippet::INTERNAL, true],
- [:public, ProjectFeature::PRIVATE, :member, Snippet::PRIVATE, true],
+ [:public, :private, :member, :public, true],
+ [:public, :private, :member, :internal, true],
+ [:public, :private, :member, :private, true],
- [:public, ProjectFeature::PRIVATE, :author, Snippet::PUBLIC, true],
- [:public, ProjectFeature::PRIVATE, :author, Snippet::INTERNAL, true],
- [:public, ProjectFeature::PRIVATE, :author, Snippet::PRIVATE, true],
+ [:public, :private, :author, :public, true],
+ [:public, :private, :author, :internal, true],
+ [:public, :private, :author, :private, true],
- [:public, ProjectFeature::DISABLED, :unauthenticated, Snippet::PUBLIC, false],
- [:public, ProjectFeature::DISABLED, :unauthenticated, Snippet::INTERNAL, false],
- [:public, ProjectFeature::DISABLED, :unauthenticated, Snippet::PRIVATE, false],
+ [:public, :disabled, :unauthenticated, :public, false],
+ [:public, :disabled, :unauthenticated, :internal, false],
+ [:public, :disabled, :unauthenticated, :private, false],
- [:public, ProjectFeature::DISABLED, :external, Snippet::PUBLIC, false],
- [:public, ProjectFeature::DISABLED, :external, Snippet::INTERNAL, false],
- [:public, ProjectFeature::DISABLED, :external, Snippet::PRIVATE, false],
+ [:public, :disabled, :external, :public, false],
+ [:public, :disabled, :external, :internal, false],
+ [:public, :disabled, :external, :private, false],
- [:public, ProjectFeature::DISABLED, :non_member, Snippet::PUBLIC, false],
- [:public, ProjectFeature::DISABLED, :non_member, Snippet::INTERNAL, false],
- [:public, ProjectFeature::DISABLED, :non_member, Snippet::PRIVATE, false],
+ [:public, :disabled, :non_member, :public, false],
+ [:public, :disabled, :non_member, :internal, false],
+ [:public, :disabled, :non_member, :private, false],
- [:public, ProjectFeature::DISABLED, :member, Snippet::PUBLIC, false],
- [:public, ProjectFeature::DISABLED, :member, Snippet::INTERNAL, false],
- [:public, ProjectFeature::DISABLED, :member, Snippet::PRIVATE, false],
+ [:public, :disabled, :member, :public, false],
+ [:public, :disabled, :member, :internal, false],
+ [:public, :disabled, :member, :private, false],
- [:public, ProjectFeature::DISABLED, :author, Snippet::PUBLIC, false],
- [:public, ProjectFeature::DISABLED, :author, Snippet::INTERNAL, false],
- [:public, ProjectFeature::DISABLED, :author, Snippet::PRIVATE, false],
+ [:public, :disabled, :author, :public, false],
+ [:public, :disabled, :author, :internal, false],
+ [:public, :disabled, :author, :private, false],
# Internal projects
- [:internal, ProjectFeature::ENABLED, :unauthenticated, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::ENABLED, :unauthenticated, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::ENABLED, :unauthenticated, Snippet::PRIVATE, false],
+ [:internal, :enabled, :unauthenticated, :public, false],
+ [:internal, :enabled, :unauthenticated, :internal, false],
+ [:internal, :enabled, :unauthenticated, :private, false],
- [:internal, ProjectFeature::ENABLED, :external, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::ENABLED, :external, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::ENABLED, :external, Snippet::PRIVATE, false],
+ [:internal, :enabled, :external, :public, false],
+ [:internal, :enabled, :external, :internal, false],
+ [:internal, :enabled, :external, :private, false],
- [:internal, ProjectFeature::ENABLED, :non_member, Snippet::PUBLIC, true],
- [:internal, ProjectFeature::ENABLED, :non_member, Snippet::INTERNAL, true],
- [:internal, ProjectFeature::ENABLED, :non_member, Snippet::PRIVATE, false],
+ [:internal, :enabled, :non_member, :public, true],
+ [:internal, :enabled, :non_member, :internal, true],
+ [:internal, :enabled, :non_member, :private, false],
- [:internal, ProjectFeature::ENABLED, :member, Snippet::PUBLIC, true],
- [:internal, ProjectFeature::ENABLED, :member, Snippet::INTERNAL, true],
- [:internal, ProjectFeature::ENABLED, :member, Snippet::PRIVATE, true],
+ [:internal, :enabled, :member, :public, true],
+ [:internal, :enabled, :member, :internal, true],
+ [:internal, :enabled, :member, :private, true],
- [:internal, ProjectFeature::ENABLED, :author, Snippet::PUBLIC, true],
- [:internal, ProjectFeature::ENABLED, :author, Snippet::INTERNAL, true],
- [:internal, ProjectFeature::ENABLED, :author, Snippet::PRIVATE, true],
+ [:internal, :enabled, :author, :public, true],
+ [:internal, :enabled, :author, :internal, true],
+ [:internal, :enabled, :author, :private, true],
- [:internal, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::PRIVATE, :unauthenticated, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PRIVATE, false],
+ [:internal, :private, :unauthenticated, :public, false],
+ [:internal, :private, :unauthenticated, :internal, false],
+ [:internal, :private, :unauthenticated, :private, false],
- [:internal, ProjectFeature::PRIVATE, :external, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::PRIVATE, :external, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::PRIVATE, :external, Snippet::PRIVATE, false],
+ [:internal, :private, :external, :public, false],
+ [:internal, :private, :external, :internal, false],
+ [:internal, :private, :external, :private, false],
- [:internal, ProjectFeature::PRIVATE, :non_member, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::PRIVATE, :non_member, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::PRIVATE, :non_member, Snippet::PRIVATE, false],
+ [:internal, :private, :non_member, :public, false],
+ [:internal, :private, :non_member, :internal, false],
+ [:internal, :private, :non_member, :private, false],
- [:internal, ProjectFeature::PRIVATE, :member, Snippet::PUBLIC, true],
- [:internal, ProjectFeature::PRIVATE, :member, Snippet::INTERNAL, true],
- [:internal, ProjectFeature::PRIVATE, :member, Snippet::PRIVATE, true],
+ [:internal, :private, :member, :public, true],
+ [:internal, :private, :member, :internal, true],
+ [:internal, :private, :member, :private, true],
- [:internal, ProjectFeature::PRIVATE, :author, Snippet::PUBLIC, true],
- [:internal, ProjectFeature::PRIVATE, :author, Snippet::INTERNAL, true],
- [:internal, ProjectFeature::PRIVATE, :author, Snippet::PRIVATE, true],
+ [:internal, :private, :author, :public, true],
+ [:internal, :private, :author, :internal, true],
+ [:internal, :private, :author, :private, true],
- [:internal, ProjectFeature::DISABLED, :unauthenticated, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::DISABLED, :unauthenticated, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::DISABLED, :unauthenticated, Snippet::PRIVATE, false],
+ [:internal, :disabled, :unauthenticated, :public, false],
+ [:internal, :disabled, :unauthenticated, :internal, false],
+ [:internal, :disabled, :unauthenticated, :private, false],
- [:internal, ProjectFeature::DISABLED, :external, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::DISABLED, :external, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::DISABLED, :external, Snippet::PRIVATE, false],
+ [:internal, :disabled, :external, :public, false],
+ [:internal, :disabled, :external, :internal, false],
+ [:internal, :disabled, :external, :private, false],
- [:internal, ProjectFeature::DISABLED, :non_member, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::DISABLED, :non_member, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::DISABLED, :non_member, Snippet::PRIVATE, false],
+ [:internal, :disabled, :non_member, :public, false],
+ [:internal, :disabled, :non_member, :internal, false],
+ [:internal, :disabled, :non_member, :private, false],
- [:internal, ProjectFeature::DISABLED, :member, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::DISABLED, :member, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::DISABLED, :member, Snippet::PRIVATE, false],
+ [:internal, :disabled, :member, :public, false],
+ [:internal, :disabled, :member, :internal, false],
+ [:internal, :disabled, :member, :private, false],
- [:internal, ProjectFeature::DISABLED, :author, Snippet::PUBLIC, false],
- [:internal, ProjectFeature::DISABLED, :author, Snippet::INTERNAL, false],
- [:internal, ProjectFeature::DISABLED, :author, Snippet::PRIVATE, false],
+ [:internal, :disabled, :author, :public, false],
+ [:internal, :disabled, :author, :internal, false],
+ [:internal, :disabled, :author, :private, false],
# Private projects
- [:private, ProjectFeature::ENABLED, :unauthenticated, Snippet::PUBLIC, false],
- [:private, ProjectFeature::ENABLED, :unauthenticated, Snippet::INTERNAL, false],
- [:private, ProjectFeature::ENABLED, :unauthenticated, Snippet::PRIVATE, false],
+ [:private, :enabled, :unauthenticated, :public, false],
+ [:private, :enabled, :unauthenticated, :internal, false],
+ [:private, :enabled, :unauthenticated, :private, false],
- [:private, ProjectFeature::ENABLED, :external, Snippet::PUBLIC, true],
- [:private, ProjectFeature::ENABLED, :external, Snippet::INTERNAL, true],
- [:private, ProjectFeature::ENABLED, :external, Snippet::PRIVATE, true],
+ [:private, :enabled, :external, :public, true],
+ [:private, :enabled, :external, :internal, true],
+ [:private, :enabled, :external, :private, true],
- [:private, ProjectFeature::ENABLED, :non_member, Snippet::PUBLIC, false],
- [:private, ProjectFeature::ENABLED, :non_member, Snippet::INTERNAL, false],
- [:private, ProjectFeature::ENABLED, :non_member, Snippet::PRIVATE, false],
+ [:private, :enabled, :non_member, :public, false],
+ [:private, :enabled, :non_member, :internal, false],
+ [:private, :enabled, :non_member, :private, false],
- [:private, ProjectFeature::ENABLED, :member, Snippet::PUBLIC, true],
- [:private, ProjectFeature::ENABLED, :member, Snippet::INTERNAL, true],
- [:private, ProjectFeature::ENABLED, :member, Snippet::PRIVATE, true],
+ [:private, :enabled, :member, :public, true],
+ [:private, :enabled, :member, :internal, true],
+ [:private, :enabled, :member, :private, true],
- [:private, ProjectFeature::ENABLED, :author, Snippet::PUBLIC, true],
- [:private, ProjectFeature::ENABLED, :author, Snippet::INTERNAL, true],
- [:private, ProjectFeature::ENABLED, :author, Snippet::PRIVATE, true],
+ [:private, :enabled, :author, :public, true],
+ [:private, :enabled, :author, :internal, true],
+ [:private, :enabled, :author, :private, true],
- [:private, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PUBLIC, false],
- [:private, ProjectFeature::PRIVATE, :unauthenticated, Snippet::INTERNAL, false],
- [:private, ProjectFeature::PRIVATE, :unauthenticated, Snippet::PRIVATE, false],
+ [:private, :private, :unauthenticated, :public, false],
+ [:private, :private, :unauthenticated, :internal, false],
+ [:private, :private, :unauthenticated, :private, false],
- [:private, ProjectFeature::PRIVATE, :external, Snippet::PUBLIC, true],
- [:private, ProjectFeature::PRIVATE, :external, Snippet::INTERNAL, true],
- [:private, ProjectFeature::PRIVATE, :external, Snippet::PRIVATE, true],
+ [:private, :private, :external, :public, true],
+ [:private, :private, :external, :internal, true],
+ [:private, :private, :external, :private, true],
- [:private, ProjectFeature::PRIVATE, :non_member, Snippet::PUBLIC, false],
- [:private, ProjectFeature::PRIVATE, :non_member, Snippet::INTERNAL, false],
- [:private, ProjectFeature::PRIVATE, :non_member, Snippet::PRIVATE, false],
+ [:private, :private, :non_member, :public, false],
+ [:private, :private, :non_member, :internal, false],
+ [:private, :private, :non_member, :private, false],
- [:private, ProjectFeature::PRIVATE, :member, Snippet::PUBLIC, true],
- [:private, ProjectFeature::PRIVATE, :member, Snippet::INTERNAL, true],
- [:private, ProjectFeature::PRIVATE, :member, Snippet::PRIVATE, true],
+ [:private, :private, :member, :public, true],
+ [:private, :private, :member, :internal, true],
+ [:private, :private, :member, :private, true],
- [:private, ProjectFeature::PRIVATE, :author, Snippet::PUBLIC, true],
- [:private, ProjectFeature::PRIVATE, :author, Snippet::INTERNAL, true],
- [:private, ProjectFeature::PRIVATE, :author, Snippet::PRIVATE, true],
+ [:private, :private, :author, :public, true],
+ [:private, :private, :author, :internal, true],
+ [:private, :private, :author, :private, true],
- [:private, ProjectFeature::DISABLED, :unauthenticated, Snippet::PUBLIC, false],
- [:private, ProjectFeature::DISABLED, :unauthenticated, Snippet::INTERNAL, false],
- [:private, ProjectFeature::DISABLED, :unauthenticated, Snippet::PRIVATE, false],
+ [:private, :disabled, :unauthenticated, :public, false],
+ [:private, :disabled, :unauthenticated, :internal, false],
+ [:private, :disabled, :unauthenticated, :private, false],
- [:private, ProjectFeature::DISABLED, :external, Snippet::PUBLIC, false],
- [:private, ProjectFeature::DISABLED, :external, Snippet::INTERNAL, false],
- [:private, ProjectFeature::DISABLED, :external, Snippet::PRIVATE, false],
+ [:private, :disabled, :external, :public, false],
+ [:private, :disabled, :external, :internal, false],
+ [:private, :disabled, :external, :private, false],
- [:private, ProjectFeature::DISABLED, :non_member, Snippet::PUBLIC, false],
- [:private, ProjectFeature::DISABLED, :non_member, Snippet::INTERNAL, false],
- [:private, ProjectFeature::DISABLED, :non_member, Snippet::PRIVATE, false],
+ [:private, :disabled, :non_member, :public, false],
+ [:private, :disabled, :non_member, :internal, false],
+ [:private, :disabled, :non_member, :private, false],
- [:private, ProjectFeature::DISABLED, :member, Snippet::PUBLIC, false],
- [:private, ProjectFeature::DISABLED, :member, Snippet::INTERNAL, false],
- [:private, ProjectFeature::DISABLED, :member, Snippet::PRIVATE, false],
+ [:private, :disabled, :member, :public, false],
+ [:private, :disabled, :member, :internal, false],
+ [:private, :disabled, :member, :private, false],
- [:private, ProjectFeature::DISABLED, :author, Snippet::PUBLIC, false],
- [:private, ProjectFeature::DISABLED, :author, Snippet::INTERNAL, false],
- [:private, ProjectFeature::DISABLED, :author, Snippet::PRIVATE, false]
+ [:private, :disabled, :author, :public, false],
+ [:private, :disabled, :author, :internal, false],
+ [:private, :disabled, :author, :private, false]
]
end
with_them do
- let!(:project_visibility) { project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value(project_type.to_s)) }
- let!(:project_feature) { project.project_feature.update_column(:snippets_access_level, feature_visibility) }
- let!(:user) { users[user_type] }
- let!(:snippet) { create(:project_snippet, visibility_level: snippet_type, project: project, author: author) }
- let!(:external_member) do
- member = project.project_member(external)
-
- if project.private?
- project.add_developer(external) unless member
- else
- member.delete if member
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel.level_value(project_visibility.to_s), snippets_access_level: feature_visibility)
+
+ if user_type == :external
+ member = project.project_member(external)
+
+ if project.private?
+ project.add_developer(external) unless member
+ else
+ member.delete if member
+ end
end
end
context "For #{params[:project_type]} project and #{params[:user_type]} users" do
- it 'agrees with the read_snippet policy' do
+ it 'returns proper outcome' do
expect(can?(user, :read_snippet, snippet)).to eq(outcome)
- end
- it 'returns proper outcome' do
results = described_class.new(user, project: project).execute
expect(results.include?(snippet)).to eq(outcome)
@@ -243,16 +256,8 @@ RSpec.shared_examples 'snippet visibility' do
context "Without a given project and #{params[:user_type]} users" do
it 'returns proper outcome' do
results = described_class.new(user).execute
- expect(results.include?(snippet)).to eq(outcome)
- end
- it 'returns no snippets when the user cannot read cross project' do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
-
- snippets = described_class.new(user).execute
-
- expect(snippets).to be_empty
+ expect(results.include?(snippet)).to eq(outcome)
end
end
end
@@ -270,46 +275,55 @@ RSpec.shared_examples 'snippet visibility' do
where(:snippet_visibility, :user_type, :outcome) do
[
- [Snippet::PUBLIC, :unauthenticated, true],
- [Snippet::PUBLIC, :external, true],
- [Snippet::PUBLIC, :non_member, true],
- [Snippet::PUBLIC, :author, true],
-
- [Snippet::INTERNAL, :unauthenticated, false],
- [Snippet::INTERNAL, :external, false],
- [Snippet::INTERNAL, :non_member, true],
- [Snippet::INTERNAL, :author, true],
-
- [Snippet::PRIVATE, :unauthenticated, false],
- [Snippet::PRIVATE, :external, false],
- [Snippet::PRIVATE, :non_member, false],
- [Snippet::PRIVATE, :author, true]
+ [:public, :unauthenticated, true],
+ [:public, :external, true],
+ [:public, :non_member, true],
+ [:public, :author, true],
+
+ [:internal, :unauthenticated, false],
+ [:internal, :external, false],
+ [:internal, :non_member, true],
+ [:internal, :author, true],
+
+ [:private, :unauthenticated, false],
+ [:private, :external, false],
+ [:private, :non_member, false],
+ [:private, :author, true]
]
end
with_them do
- let!(:user) { users[user_type] }
- let!(:snippet) { create(:personal_snippet, visibility_level: snippet_visibility, author: author) }
+ let_it_be(:private_snippet) { create(:personal_snippet, :private, author: author) }
+ let_it_be(:public_snippet) { create(:personal_snippet, :public, author: author) }
+ let_it_be(:internal_snippet) { create(:personal_snippet, :internal, author: author) }
context "For personal and #{params[:snippet_visibility]} snippets with #{params[:user_type]} user" do
- it 'agrees with read_snippet policy' do
+ it 'returns proper outcome' do
expect(can?(user, :read_snippet, snippet)).to eq(outcome)
- end
- it 'returns proper outcome' do
results = described_class.new(user).execute
+
expect(results.include?(snippet)).to eq(outcome)
end
+ end
+ end
+ end
- it 'returns personal snippets when the user cannot read cross project' do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
+ context 'when the user cannot read cross project' do
+ it 'returns only personal snippets' do
+ personal_snippet = create(:personal_snippet, :public, author: author)
+ create(:project_snippet, :public, project: project, author: author)
- results = described_class.new(user).execute
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(author, :read_cross_project) { false }
- expect(results.include?(snippet)).to eq(outcome)
- end
- end
+ service = described_class.new(author)
+
+ expect(service).to receive(:personal_snippets).and_call_original
+ expect(service).not_to receive(:snippets_of_visible_projects)
+ expect(service).not_to receive(:snippets_of_authorized_projects)
+
+ expect(service.execute).to match_array([personal_snippet])
end
end
end
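
The tables above rely on rspec-parameterized's where/with_them; as a point of reference, here is a minimal hedged sketch of that mechanism (the column names and expectations are illustrative only, not part of the patch):

require 'rspec-parameterized'

RSpec.describe 'a parameterized example group' do
  # Each row of the array becomes one generated context; the column names
  # are exposed inside `with_them` as memoized helpers (like lets).
  where(:snippet_visibility, :user_type, :outcome) do
    [
      [:public,  :non_member, true],
      [:private, :non_member, false]
    ]
  end

  with_them do
    it 'exposes each column of the current row' do
      expect(snippet_visibility).to be_a(Symbol)
      expect(user_type).to eq(:non_member)
      expect([true, false]).to include(outcome)
    end
  end
end
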
diff --git a/spec/support/shared_examples/graphql/design_fields_shared_examples.rb b/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
index 029d7e677da..ef7086234c4 100644
--- a/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/design_fields_shared_examples.rb
@@ -35,6 +35,7 @@ RSpec.shared_examples 'a GraphQL type with design fields' do
object = GitlabSchema.sync_lazy(GitlabSchema.object_from_id(object_id))
object_type.authorized_new(object, query.context)
end
+
let(:instance_b) do
object_b = GitlabSchema.sync_lazy(GitlabSchema.object_from_id(object_id_b))
object_type.authorized_new(object_b, query.context)
diff --git a/spec/support/shared_examples/graphql/mutations/resolves_subscription_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/resolves_subscription_shared_examples.rb
new file mode 100644
index 00000000000..ebba312e895
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/resolves_subscription_shared_examples.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'a subscribeable graphql resource' do
+ let(:project) { resource.project }
+ let_it_be(:user) { create(:user) }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ specify { expect(described_class).to require_graphql_authorizations(permission_name) }
+
+ describe '#resolve' do
+ let(:subscribe) { true }
+ let(:mutated_resource) { subject[resource.class.name.underscore.to_sym] }
+
+ subject { mutation.resolve(project_path: resource.project.full_path, iid: resource.iid, subscribed_state: subscribe) }
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the resource' do
+ before do
+ resource.project.add_developer(user)
+ end
+
+ it 'subscribes to the resource' do
+ expect(mutated_resource).to eq(resource)
+ expect(mutated_resource.subscribed?(user, project)).to eq(true)
+ expect(subject[:errors]).to be_empty
+ end
+
+ context 'when passing subscribe as false' do
+ let(:subscribe) { false }
+
+ it 'unsubscribes from the resource' do
+ resource.subscribe(user, project)
+
+ expect(mutated_resource.subscribed?(user, project)).to eq(false)
+ end
+ end
+ end
+ end
+end
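
For orientation, a hedged sketch of a spec consuming 'a subscribeable graphql resource'; the mutation class, factory, and permission name are assumptions, not part of this patch:

require 'spec_helper'

RSpec.describe Mutations::MergeRequests::SetSubscription do
  # The shared examples expect the including context to define `resource`
  # (something with #project, #iid and subscription support) and `permission_name`.
  it_behaves_like 'a subscribeable graphql resource' do
    let_it_be(:resource) { create(:merge_request) }   # assumed factory
    let(:permission_name) { :update_subscription }    # assumed permission
  end
end
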
diff --git a/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb
new file mode 100644
index 00000000000..cfa12171b7e
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'an assignable resource' do
+ let_it_be(:user) { create(:user) }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ describe '#resolve' do
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:assignee2) { create(:user) }
+ let(:assignee_usernames) { [assignee.username] }
+ let(:mutated_resource) { subject[resource.class.name.underscore.to_sym] }
+
+ subject { mutation.resolve(project_path: resource.project.full_path, iid: resource.iid, assignee_usernames: assignee_usernames) }
+
+ before do
+ resource.project.add_developer(assignee)
+ resource.project.add_developer(assignee2)
+ end
+
+ it 'raises an error if the resource is not accessible to the user' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+
+ context 'when the user can update the resource' do
+ before do
+ resource.project.add_developer(user)
+ end
+
+ it 'replaces the assignee' do
+ resource.assignees = [assignee2]
+ resource.save!
+
+ expect(mutated_resource).to eq(resource)
+ expect(mutated_resource.assignees).to contain_exactly(assignee)
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'returns errors when resource could not be updated' do
+ allow(resource).to receive(:errors_on_object).and_return(['foo'])
+
+ expect(subject[:errors]).not_to match_array(['foo'])
+ end
+
+ context 'when passing an empty assignee list' do
+ let(:assignee_usernames) { [] }
+
+ before do
+ resource.assignees = [assignee]
+ resource.save!
+ end
+
+ it 'removes all assignees' do
+ expect(mutated_resource).to eq(resource)
+ expect(mutated_resource.assignees).to eq([])
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing "append" as true' do
+ subject do
+ mutation.resolve(
+ project_path: resource.project.full_path,
+ iid: resource.iid,
+ assignee_usernames: assignee_usernames,
+ operation_mode: Types::MutationOperationModeEnum.enum[:append]
+ )
+ end
+
+ before do
+ resource.assignees = [assignee2]
+ resource.save!
+
+ # In CE, APPEND is a NOOP as you can't have multiple assignees
+ # We test multiple assignment in EE specs
+ if resource.is_a?(MergeRequest)
+ stub_licensed_features(multiple_merge_request_assignees: false)
+ else
+ stub_licensed_features(multiple_issue_assignees: false)
+ end
+ end
+
+ it 'is a NO-OP in FOSS' do
+ expect(mutated_resource).to eq(resource)
+ expect(mutated_resource.assignees).to contain_exactly(assignee2)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing "remove" as true' do
+ before do
+ resource.assignees = [assignee]
+ resource.save!
+ end
+
+ it 'removes named assignee' do
+ mutated_resource = mutation.resolve(
+ project_path: resource.project.full_path,
+ iid: resource.iid,
+ assignee_usernames: assignee_usernames,
+ operation_mode: Types::MutationOperationModeEnum.enum[:remove]
+ )[resource.class.name.underscore.to_sym]
+
+ expect(mutated_resource).to eq(resource)
+ expect(mutated_resource.assignees).to eq([])
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'does not remove unnamed assignee' do
+ mutated_resource = mutation.resolve(
+ project_path: resource.project.full_path,
+ iid: resource.iid,
+ assignee_usernames: [assignee2.username],
+ operation_mode: Types::MutationOperationModeEnum.enum[:remove]
+ )[resource.class.name.underscore.to_sym]
+
+ expect(mutated_resource).to eq(resource)
+ expect(mutated_resource.assignees).to contain_exactly(assignee)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+ end
+ end
+end
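
A minimal hedged sketch of including 'an assignable resource'; the mutation class and factory are assumptions:

require 'spec_helper'

RSpec.describe Mutations::Issues::SetAssignees do
  # Only `resource` has to be provided; the examples add users to its project.
  it_behaves_like 'an assignable resource' do
    let_it_be(:resource, reload: true) { create(:issue) } # assumed factory
  end
end
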
diff --git a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
index e1dd98814f1..41b7da07d2d 100644
--- a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
@@ -8,6 +8,7 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
noteable: noteable,
project: (noteable.project if noteable.respond_to?(:project)))
end
+
let(:user) { note.author }
context 'for regular notes' do
diff --git a/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb b/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb
new file mode 100644
index 00000000000..397e22ace28
--- /dev/null
+++ b/spec/support/shared_examples/graphql/projects/merge_request_n_plus_one_query_examples.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+RSpec.shared_examples 'N+1 query check' do
+ it 'prevents N+1 queries' do
+ execute_query # "warm up" to prevent nondeterministic counts
+
+ control_count = ActiveRecord::QueryRecorder.new { execute_query }.count
+
+ search_params[:iids] << extra_iid_for_second_query
+ expect { execute_query }.not_to exceed_query_limit(control_count)
+ end
+end
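
Sketch of the contract an including request spec has to satisfy for 'N+1 query check': a memoized search_params hash whose :iids array can be appended to, an extra_iid_for_second_query, and an execute_query helper. The GraphQL field and record names below are assumptions about the consuming spec:

it_behaves_like 'N+1 query check' do
  let(:search_params) { { iids: [first_merge_request.iid.to_s] } }
  let(:extra_iid_for_second_query) { second_merge_request.iid.to_s }

  def execute_query
    query = graphql_query_for(
      :project,
      { full_path: project.full_path },
      query_graphql_field(:merge_requests, search_params, 'nodes { iid title }')
    )

    post_graphql(query, current_user: current_user)
  end
end
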
diff --git a/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb b/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb
new file mode 100644
index 00000000000..bdb0316bf5a
--- /dev/null
+++ b/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'API::CI::Runner application context metadata' do |api_route|
+ it 'contains correct context metadata' do
+ # Avoids popping the context from the thread so we can
+ # check its content after the request.
+ allow(Labkit::Context).to receive(:pop)
+
+ send_request
+
+ Labkit::Context.with_context do |context|
+ expected_context = {
+ 'meta.caller_id' => api_route,
+ 'meta.user' => job.user.username,
+ 'meta.project' => job.project.full_path,
+ 'meta.root_namespace' => job.project.full_path_components.first
+ }
+
+ expect(context.to_h).to include(expected_context)
+ end
+ end
+end
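
A hedged sketch of wiring up 'API::CI::Runner application context metadata'; the route string, endpoint and lets below are assumptions about the consuming request spec:

it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id' do
  let(:job) { create(:ci_build, :running, user: create(:user)) } # assumed factory

  let(:send_request) do
    put api("/jobs/#{job.id}"), params: { token: job.token, state: 'success' }
  end
end
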
diff --git a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
index af65b61021c..8cf6babe146 100644
--- a/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/background_migration/mentions_migration_shared_examples.rb
@@ -82,3 +82,25 @@ RSpec.shared_examples 'schedules resource mentions migration' do |resource_class
end
end
end
+
+RSpec.shared_examples 'resource migration not run' do |migration_class, resource_class|
+ it 'does not migrate mentions' do
+ join = migration_class::JOIN
+ conditions = migration_class::QUERY_CONDITIONS
+
+ expect do
+ subject.perform(resource_class.name, join, conditions, false, resource_class.minimum(:id), resource_class.maximum(:id))
+ end.to change { user_mentions.count }.by(0)
+ end
+end
+
+RSpec.shared_examples 'resource notes migration not run' do |migration_class, resource_class|
+ it 'does not migrate mentions' do
+ join = migration_class::JOIN
+ conditions = migration_class::QUERY_CONDITIONS
+
+ expect do
+ subject.perform(resource_class.name, join, conditions, true, Note.minimum(:id), Note.maximum(:id))
+ end.to change { user_mentions.count }.by(0)
+ end
+end
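
Sketch of reusing the two negative examples above; the migration and model class names and the user_mentions wiring are assumptions made only for illustration:

RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention do
  # The including spec must supply `subject` (the migration instance) and
  # `user_mentions` (anything responding to #count for the target table).
  let(:user_mentions) { MergeRequestUserMention }

  subject { described_class.new }

  it_behaves_like 'resource migration not run',
    Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest, MergeRequest

  it_behaves_like 'resource notes migration not run',
    Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest, MergeRequest
end
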
diff --git a/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb
new file mode 100644
index 00000000000..a3800f050bb
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/kubernetes/network_policy_common_shared_examples.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'network policy common specs' do
+ let(:name) { 'example-name' }
+ let(:namespace) { 'example-namespace' }
+ let(:labels) { nil }
+
+ describe 'as_json' do
+ let(:json_policy) do
+ {
+ name: name,
+ namespace: namespace,
+ creation_timestamp: nil,
+ manifest: YAML.dump(
+ {
+ metadata: metadata,
+ spec: spec
+ }.deep_stringify_keys
+ ),
+ is_autodevops: false,
+ is_enabled: true
+ }
+ end
+
+ subject { policy.as_json }
+
+ it { is_expected.to eq(json_policy) }
+ end
+
+ describe 'autodevops?' do
+ subject { policy.autodevops? }
+
+ let(:labels) { { chart: chart } }
+ let(:chart) { nil }
+
+ it { is_expected.to be false }
+
+ context 'with non-autodevops chart' do
+ let(:chart) { 'foo' }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with autodevops chart' do
+ let(:chart) { 'auto-deploy-app-0.6.0' }
+
+ it { is_expected.to be true }
+ end
+ end
+
+ describe 'enabled?' do
+ subject { policy.enabled? }
+
+ let(:selector) { nil }
+
+ it { is_expected.to be true }
+
+ context 'with empty selector' do
+ let(:selector) { {} }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with nil matchLabels in selector' do
+ let(:selector) { { matchLabels: nil } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with empty matchLabels in selector' do
+ let(:selector) { { matchLabels: {} } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with disabled_by label in matchLabels in selector' do
+ let(:selector) do
+ { matchLabels: { Gitlab::Kubernetes::NetworkPolicyCommon::DISABLED_BY_LABEL => 'gitlab' } }
+ end
+
+ it { is_expected.to be false }
+ end
+ end
+
+ describe 'enable' do
+ subject { policy.enabled? }
+
+ let(:selector) { nil }
+
+ before do
+ policy.enable
+ end
+
+ it { is_expected.to be true }
+
+ context 'with empty selector' do
+ let(:selector) { {} }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with nil matchLabels in selector' do
+ let(:selector) { { matchLabels: nil } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with empty matchLabels in selector' do
+ let(:selector) { { matchLabels: {} } }
+
+ it { is_expected.to be true }
+ end
+
+ context 'with disabled_by label in matchLabels in selector' do
+ let(:selector) do
+ { matchLabels: { Gitlab::Kubernetes::NetworkPolicyCommon::DISABLED_BY_LABEL => 'gitlab' } }
+ end
+
+ it { is_expected.to be true }
+ end
+ end
+
+ describe 'disable' do
+ subject { policy.enabled? }
+
+ let(:selector) { nil }
+
+ before do
+ policy.disable
+ end
+
+ it { is_expected.to be false }
+
+ context 'with empty selector' do
+ let(:selector) { {} }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with nil matchLabels in selector' do
+ let(:selector) { { matchLabels: nil } }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with empty matchLabels in selector' do
+ let(:selector) { { matchLabels: {} } }
+
+ it { is_expected.to be false }
+ end
+
+ context 'with disabled_by label in matchLabels in selector' do
+ let(:selector) do
+ { matchLabels: { Gitlab::Kubernetes::NetworkPolicyCommon::DISABLED_BY_LABEL => 'gitlab' } }
+ end
+
+ it { is_expected.to be false }
+ end
+ end
+end
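
Sketch of a consumer of 'network policy common specs'. The examples expect policy, selector, metadata and spec from the including context; the constructor keywords below are assumptions about Gitlab::Kubernetes::NetworkPolicy and may not match its real signature:

RSpec.describe Gitlab::Kubernetes::NetworkPolicy do
  it_behaves_like 'network policy common specs' do
    let(:selector) { { matchLabels: { role: 'db' } } }
    let(:ingress)  { [{ from: [{ podSelector: {} }] }] }
    let(:metadata) { { name: name, namespace: namespace } }
    let(:spec)     { { podSelector: selector, policyTypes: %w[Ingress], ingress: ingress } }

    let(:policy) do
      described_class.new(
        name: name,
        namespace: namespace,
        labels: labels,
        selector: selector, # assumed keyword argument
        ingress: ingress
      )
    end
  end
end
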
diff --git a/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb
new file mode 100644
index 00000000000..6b6e25ca1dd
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/template/template_shared_examples.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'file template shared examples' do |filename, file_extension|
+ describe '.all' do
+ it "strips the #{file_extension} suffix" do
+ expect(subject.all.first.name).not_to end_with(file_extension)
+ end
+
+ it 'ensures that the template name is used exactly once' do
+ all = subject.all.group_by(&:name)
+ duplicates = all.select { |_, templates| templates.length > 1 }
+
+ expect(duplicates).to be_empty
+ end
+ end
+
+ describe '.by_category' do
+ it 'returns sorted results' do
+ result = described_class.by_category('General')
+
+ expect(result).to eq(result.sort)
+ end
+ end
+
+ describe '.find' do
+ it 'returns nil if the file does not exist' do
+ expect(subject.find('nonexistent-file')).to be nil
+ end
+
+ it 'returns the corresponding object of a valid file' do
+ template = subject.find(filename)
+
+ expect(template).to be_a described_class
+ expect(template.name).to eq(filename)
+ end
+ end
+
+ describe '#<=>' do
+ it 'sorts lexicographically' do
+ one = described_class.new("a.#{file_extension}")
+ other = described_class.new("z.#{file_extension}")
+
+ expect(one.<=>(other)).to be(-1)
+ expect([other, one].sort).to eq([one, other])
+ end
+ end
+end
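
A hedged sketch of including 'file template shared examples'; the template class and arguments are assumptions:

require 'spec_helper'

RSpec.describe Gitlab::Template::GitlabCiYmlTemplate do
  it_behaves_like 'file template shared examples', 'Ruby', '.gitlab-ci.yml'
end
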
diff --git a/spec/support/shared_examples/models/chat_service_shared_examples.rb b/spec/support/shared_examples/models/chat_service_shared_examples.rb
index 0a1c27b32db..ad237ad9f49 100644
--- a/spec/support/shared_examples/models/chat_service_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_service_shared_examples.rb
@@ -198,6 +198,7 @@ RSpec.shared_examples "chat service" do |service_name|
message: "user created page: Awesome wiki_page"
}
end
+
let(:wiki_page) { create(:wiki_page, wiki: project.wiki, **opts) }
let(:sample_data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, "create") }
@@ -250,6 +251,7 @@ RSpec.shared_examples "chat service" do |service_name|
project: project, status: status,
sha: project.commit.sha, ref: project.default_branch)
end
+
let(:sample_data) { Gitlab::DataBuilder::Pipeline.build(pipeline) }
context "with failed pipeline" do
diff --git a/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
index 239588d3b2f..394253fb699 100644
--- a/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_helm_cert_shared_examples.rb
@@ -28,46 +28,16 @@ RSpec.shared_examples 'cluster application helm specs' do |application_name|
describe '#files' do
subject { application.files }
- context 'managed_apps_local_tiller feature flag is disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
-
- context 'when the helm application does not have a ca_cert' do
- before do
- application.cluster.application_helm.ca_cert = nil
- end
-
- it 'does not include cert files when there is no ca_cert entry' do
- expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
- end
- end
-
- it 'includes cert files when there is a ca_cert entry' do
- expect(subject).to include(:'ca.pem', :'cert.pem', :'key.pem')
- expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
-
- cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
- expect(cert.not_after).to be < 60.minutes.from_now
- end
+ it 'does not include cert files' do
+ expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
end
- context 'managed_apps_local_tiller feature flag is enabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: application.cluster.clusterable)
- end
+ context 'when cluster does not have helm installed' do
+ let(:application) { create(application_name, :no_helm_installed) }
it 'does not include cert files' do
expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
end
-
- context 'when cluster does not have helm installed' do
- let(:application) { create(application_name, :no_helm_installed) }
-
- it 'does not include cert files' do
- expect(subject).not_to include(:'ca.pem', :'cert.pem', :'key.pem')
- end
- end
end
end
end
diff --git a/spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb
index 7f0c60d4204..55e458db512 100644
--- a/spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_initial_status_shared_examples.rb
@@ -6,46 +6,8 @@ RSpec.shared_examples 'cluster application initial status specs' do
subject { described_class.new(cluster: cluster) }
- context 'local tiller feature flag is disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
-
- it 'sets a default status' do
- expect(subject.status_name).to be(:not_installable)
- end
- end
-
- context 'local tiller feature flag is enabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: cluster.clusterable)
- end
-
- it 'sets a default status' do
- expect(subject.status_name).to be(:installable)
- end
- end
-
- context 'when application helm is scheduled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
-
- create(:clusters_applications_helm, :scheduled, cluster: cluster)
- end
-
- it 'defaults to :not_installable' do
- expect(subject.status_name).to be(:not_installable)
- end
- end
-
- context 'when application helm is installed' do
- before do
- create(:clusters_applications_helm, :installed, cluster: cluster)
- end
-
- it 'sets a default status' do
- expect(subject.status_name).to be(:installable)
- end
+ it 'sets a default status' do
+ expect(subject.status_name).to be(:installable)
end
end
end
diff --git a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
index f80ca235220..7603787a54e 100644
--- a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
@@ -48,43 +48,21 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_installed
end
- context 'managed_apps_local_tiller feature flag disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
-
- it 'updates helm version' do
- subject.cluster.application_helm.update!(version: '1.2.3')
+ it 'does not update the helm version' do
+ subject.cluster.application_helm.update!(version: '1.2.3')
+ expect do
subject.make_installed!
subject.cluster.application_helm.reload
-
- expect(subject.cluster.application_helm.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
- end
+ end.not_to change { subject.cluster.application_helm.version }
end
- context 'managed_apps_local_tiller feature flag enabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: subject.cluster.clusterable)
- end
-
- it 'does not update the helm version' do
- subject.cluster.application_helm.update!(version: '1.2.3')
-
- expect do
- subject.make_installed!
-
- subject.cluster.application_helm.reload
- end.not_to change { subject.cluster.application_helm.version }
- end
-
- context 'the cluster has no helm installed' do
- subject { create(application_name, :installing, :no_helm_installed) }
+ context 'the cluster has no helm installed' do
+ subject { create(application_name, :installing, :no_helm_installed) }
- it 'runs without errors' do
- expect { subject.make_installed! }.not_to raise_error
- end
+ it 'runs without errors' do
+ expect { subject.make_installed! }.not_to raise_error
end
end
@@ -97,43 +75,21 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_updated
end
- context 'managed_apps_local_tiller feature flag disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
-
- it 'updates helm version' do
- subject.cluster.application_helm.update!(version: '1.2.3')
+ it 'does not update the helm version' do
+ subject.cluster.application_helm.update!(version: '1.2.3')
+ expect do
subject.make_installed!
subject.cluster.application_helm.reload
-
- expect(subject.cluster.application_helm.version).to eq(Gitlab::Kubernetes::Helm::HELM_VERSION)
- end
+ end.not_to change { subject.cluster.application_helm.version }
end
- context 'managed_apps_local_tiller feature flag enabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: true)
- end
-
- it 'does not update the helm version' do
- subject.cluster.application_helm.update!(version: '1.2.3')
-
- expect do
- subject.make_installed!
-
- subject.cluster.application_helm.reload
- end.not_to change { subject.cluster.application_helm.version }
- end
-
- context 'the cluster has no helm installed' do
- subject { create(application_name, :updating, :no_helm_installed) }
+ context 'the cluster has no helm installed' do
+ subject { create(application_name, :updating, :no_helm_installed) }
- it 'runs without errors' do
- expect { subject.make_installed! }.not_to raise_error
- end
+ it 'runs without errors' do
+ expect { subject.make_installed! }.not_to raise_error
end
end
end
@@ -185,62 +141,26 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_installed
end
- context 'local tiller flag enabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: true)
- end
-
- context 'helm record does not exist' do
- subject { build(application_name, :installing, :no_helm_installed) }
-
- it 'does not create a helm record' do
- subject.make_externally_installed!
-
- subject.cluster.reload
- expect(subject.cluster.application_helm).to be_nil
- end
- end
-
- context 'helm record exists' do
- subject { build(application_name, :installing, cluster: old_helm.cluster) }
+ context 'helm record does not exist' do
+ subject { build(application_name, :installing, :no_helm_installed) }
- it 'does not update helm version' do
- subject.make_externally_installed!
+ it 'does not create a helm record' do
+ subject.make_externally_installed!
- subject.cluster.application_helm.reload
-
- expect(subject.cluster.application_helm.version).to eq('1.2.3')
- end
+ subject.cluster.reload
+ expect(subject.cluster.application_helm).to be_nil
end
end
- context 'local tiller flag disabled' do
- before do
- stub_feature_flags(managed_apps_local_tiller: false)
- end
-
- context 'helm record does not exist' do
- subject { build(application_name, :installing, :no_helm_installed) }
-
- it 'creates a helm record' do
- subject.make_externally_installed!
-
- subject.cluster.reload
- expect(subject.cluster.application_helm).to be_present
- expect(subject.cluster.application_helm).to be_persisted
- end
- end
-
- context 'helm record exists' do
- subject { build(application_name, :installing, cluster: old_helm.cluster) }
+ context 'helm record exists' do
+ subject { build(application_name, :installing, cluster: old_helm.cluster) }
- it 'does not update helm version' do
- subject.make_externally_installed!
+ it 'does not update helm version' do
+ subject.make_externally_installed!
- subject.cluster.application_helm.reload
+ subject.cluster.application_helm.reload
- expect(subject.cluster.application_helm.version).to eq('1.2.3')
- end
+ expect(subject.cluster.application_helm.version).to eq('1.2.3')
end
end
@@ -262,6 +182,14 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_installed
end
+
+ it 'clears #status_reason' do
+ expect(subject.status_reason).not_to be_nil
+
+ subject.make_externally_installed!
+
+ expect(subject.status_reason).to be_nil
+ end
end
end
@@ -292,6 +220,14 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
expect(subject).to be_uninstalled
end
+
+ it 'clears #status_reason' do
+ expect(subject.status_reason).not_to be_nil
+
+ subject.make_externally_uninstalled!
+
+ expect(subject.status_reason).to be_nil
+ end
end
end
diff --git a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
new file mode 100644
index 00000000000..99a09993900
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
@@ -0,0 +1,176 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.shared_examples_for CounterAttribute do |counter_attributes|
+ it 'defines a Redis counter_key' do
+ expect(model.counter_key(:counter_name))
+ .to eq("project:{#{model.project_id}}:counters:CounterAttributeModel:#{model.id}:counter_name")
+ end
+
+ it 'defines a method to store counters' do
+ expect(model.class.counter_attributes.to_a).to eq(counter_attributes)
+ end
+
+ counter_attributes.each do |attribute|
+ describe attribute do
+ describe '#delayed_increment_counter', :redis do
+ let(:increment) { 10 }
+
+ subject { model.delayed_increment_counter(attribute, increment) }
+
+ context 'when attribute is a counter attribute' do
+ where(:increment) { [10, -3] }
+
+ with_them do
+ it 'increments the counter in Redis' do
+ subject
+
+ Gitlab::Redis::SharedState.with do |redis|
+ counter = redis.get(model.counter_key(attribute))
+ expect(counter).to eq(increment.to_s)
+ end
+ end
+
+ it 'does not increment the counter for the record' do
+ expect { subject }.not_to change { model.reset.read_attribute(attribute) }
+ end
+
+ it 'schedules a worker to flush counter increments asynchronously' do
+ expect(FlushCounterIncrementsWorker).to receive(:perform_in)
+ .with(CounterAttribute::WORKER_DELAY, model.class.name, model.id, attribute)
+ .and_call_original
+
+ subject
+ end
+ end
+
+ context 'when increment is 0' do
+ let(:increment) { 0 }
+
+ it 'does nothing' do
+ expect(FlushCounterIncrementsWorker).not_to receive(:perform_in)
+ expect(model).not_to receive(:update!)
+
+ subject
+ end
+ end
+ end
+
+ context 'when attribute is not a counter attribute' do
+ it 'delegates to ActiveRecord update!' do
+ expect { model.delayed_increment_counter(:unknown_attribute, 10) }
+ .to raise_error(ActiveModel::MissingAttributeError)
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(efficient_counter_attribute: false)
+ end
+
+ it 'delegates to ActiveRecord update!' do
+ expect { subject }
+ .to change { model.reset.read_attribute(attribute) }.by(increment)
+ end
+
+ it 'does not increment the counter in Redis' do
+ subject
+
+ Gitlab::Redis::SharedState.with do |redis|
+ counter = redis.get(model.counter_key(attribute))
+ expect(counter).to be_nil
+ end
+ end
+ end
+ end
+ end
+ end
+
+ describe '.flush_increments_to_database!', :redis do
+ let(:incremented_attribute) { counter_attributes.first }
+
+ subject { model.flush_increments_to_database!(incremented_attribute) }
+
+ it 'obtains an exclusive lease during processing' do
+ expect(model)
+ .to receive(:in_lock)
+ .with(model.counter_lock_key(incremented_attribute), ttl: described_class::WORKER_LOCK_TTL)
+ .and_call_original
+
+ subject
+ end
+
+ context 'when there is a counter to flush' do
+ before do
+ model.delayed_increment_counter(incremented_attribute, 10)
+ model.delayed_increment_counter(incremented_attribute, -3)
+ end
+
+ it 'updates the record' do
+ expect { subject }.to change { model.reset.read_attribute(incremented_attribute) }.by(7)
+ end
+
+ it 'removes the increment entry from Redis' do
+ Gitlab::Redis::SharedState.with do |redis|
+ key_exists = redis.exists(model.counter_key(incremented_attribute))
+ expect(key_exists).to be_truthy
+ end
+
+ subject
+
+ Gitlab::Redis::SharedState.with do |redis|
+ key_exists = redis.exists(model.counter_key(incremented_attribute))
+ expect(key_exists).to be_falsey
+ end
+ end
+ end
+
+ context 'when there are no counters to flush' do
+ context 'when there are no counters in the relative :flushed key' do
+ it 'does not change the record' do
+ expect { subject }.not_to change { model.reset.attributes }
+ end
+ end
+
+ # This can happen when updating the counters in the database fails with an error
+ # and the retried worker flushes the counters again, but the main key has
+ # disappeared and the increment has been moved to the "<...>:flushed" key.
+ context 'when there are counters in the relative :flushed key' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.incrby(model.counter_flushed_key(incremented_attribute), 10)
+ end
+ end
+
+ it 'updates the record' do
+ expect { subject }.to change { model.reset.read_attribute(incremented_attribute) }.by(10)
+ end
+
+ it 'deletes the relative :flushed key' do
+ subject
+
+ Gitlab::Redis::SharedState.with do |redis|
+ key_exists = redis.exists(model.counter_flushed_key(incremented_attribute))
+ expect(key_exists).to be_falsey
+ end
+ end
+ end
+ end
+
+ context 'when deleting :flushed key fails' do
+ before do
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.incrby(model.counter_flushed_key(incremented_attribute), 10)
+
+ expect(redis).to receive(:del).and_raise('could not delete key')
+ end
+ end
+
+ it 'does a rollback of the counter update' do
+ expect { subject }.to raise_error('could not delete key')
+
+ expect(model.reset.read_attribute(incremented_attribute)).to eq(0)
+ end
+ end
+ end
+end
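
Sketch of pulling these examples in. The counter_key expectation above is written against a class literally named CounterAttributeModel, so the consumer is assumed to be a dummy model backed by project_statistics; the tags and factory below are assumptions:

RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_state do
  let(:project_statistics) { create(:project_statistics) }

  it_behaves_like CounterAttribute, [:build_artifacts_size, :commit_count] do
    let(:model) { CounterAttributeModel.find(project_statistics.id) }
  end
end
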
diff --git a/spec/support/shared_examples/models/concerns/file_store_mounter_shared_examples.rb b/spec/support/shared_examples/models/concerns/file_store_mounter_shared_examples.rb
new file mode 100644
index 00000000000..4cb087c47ad
--- /dev/null
+++ b/spec/support/shared_examples/models/concerns/file_store_mounter_shared_examples.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'mounted file in local store' do
+ it 'is stored locally' do
+ expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
+ expect(subject.file).to be_file_storage
+ expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+end
+
+RSpec.shared_examples 'mounted file in object store' do
+ it 'is stored remotely' do
+ expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
+ expect(subject.file).not_to be_file_storage
+ expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+end
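
Sketch of the contract these two examples check: subject must expose #file_store and a CarrierWave-style #file. The model and factory names below are entirely hypothetical:

RSpec.describe MyUpload do # hypothetical model mounting a file uploader
  it_behaves_like 'mounted file in local store' do
    subject { create(:my_upload) }                # hypothetical factory
  end

  it_behaves_like 'mounted file in object store' do
    subject { create(:my_upload, :remote_store) } # hypothetical trait
  end
end
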
diff --git a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
index 32d502af5a2..15ca1f56bd0 100644
--- a/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/timebox_shared_examples.rb
@@ -3,7 +3,8 @@
RSpec.shared_examples 'a timebox' do |timebox_type|
let(:project) { create(:project, :public) }
let(:group) { create(:group) }
- let(:timebox) { create(timebox_type, project: project) }
+ let(:timebox_args) { [] }
+ let(:timebox) { create(timebox_type, *timebox_args, project: project) }
let(:issue) { create(:issue, project: project) }
let(:user) { create(:user) }
let(:timebox_table_name) { timebox_type.to_s.pluralize.to_sym }
@@ -12,7 +13,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
context 'with a project' do
it_behaves_like 'AtomicInternalId' do
let(:internal_id_attribute) { :iid }
- let(:instance) { build(timebox_type, project: build(:project), group: nil) }
+ let(:instance) { build(timebox_type, *timebox_args, project: build(:project), group: nil) }
let(:scope) { :project }
let(:scope_attrs) { { project: instance.project } }
let(:usage) { timebox_table_name }
@@ -22,7 +23,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
context 'with a group' do
it_behaves_like 'AtomicInternalId' do
let(:internal_id_attribute) { :iid }
- let(:instance) { build(timebox_type, project: nil, group: build(:group)) }
+ let(:instance) { build(timebox_type, *timebox_args, project: nil, group: build(:group)) }
let(:scope) { :group }
let(:scope_attrs) { { namespace: instance.group } }
let(:usage) { timebox_table_name }
@@ -37,14 +38,14 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
describe 'start_date' do
it 'adds an error when start_date is greater than due_date' do
- timebox = build(timebox_type, start_date: Date.tomorrow, due_date: Date.yesterday)
+ timebox = build(timebox_type, *timebox_args, start_date: Date.tomorrow, due_date: Date.yesterday)
expect(timebox).not_to be_valid
expect(timebox.errors[:due_date]).to include("must be greater than start date")
end
it 'adds an error when start_date is greater than 9999-12-31' do
- timebox = build(timebox_type, start_date: Date.new(10000, 1, 1))
+ timebox = build(timebox_type, *timebox_args, start_date: Date.new(10000, 1, 1))
expect(timebox).not_to be_valid
expect(timebox.errors[:start_date]).to include("date must not be after 9999-12-31")
@@ -53,7 +54,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
describe 'due_date' do
it 'adds an error when due_date is greater than 9999-12-31' do
- timebox = build(timebox_type, due_date: Date.new(10000, 1, 1))
+ timebox = build(timebox_type, *timebox_args, due_date: Date.new(10000, 1, 1))
expect(timebox).not_to be_valid
expect(timebox.errors[:due_date]).to include("date must not be after 9999-12-31")
@@ -64,7 +65,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
it { is_expected.to validate_presence_of(:title) }
it 'is invalid if title would be empty after sanitation' do
- timebox = build(timebox_type, project: project, title: '<img src=x onerror=prompt(1)>')
+ timebox = build(timebox_type, *timebox_args, project: project, title: '<img src=x onerror=prompt(1)>')
expect(timebox).not_to be_valid
expect(timebox.errors[:title]).to include("can't be blank")
@@ -73,7 +74,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
describe '#timebox_type_check' do
it 'is invalid if it has both project_id and group_id' do
- timebox = build(timebox_type, group: group)
+ timebox = build(timebox_type, *timebox_args, group: group)
timebox.project = project
expect(timebox).not_to be_valid
@@ -98,7 +99,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
context "per group" do
- let(:timebox) { create(timebox_type, group: group) }
+ let(:timebox) { create(timebox_type, *timebox_args, group: group) }
before do
project.update(group: group)
@@ -111,7 +112,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
it "does not accept the same title of a child project timebox" do
- create(timebox_type, project: group.projects.first)
+ create(timebox_type, *timebox_args, project: group.projects.first)
new_timebox = described_class.new(group: group, title: timebox.title)
@@ -143,7 +144,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
context 'when project_id is not present' do
- let(:timebox) { build(timebox_type, group: group) }
+ let(:timebox) { build(timebox_type, *timebox_args, group: group) }
it 'returns false' do
expect(timebox.project_timebox?).to be_falsey
@@ -153,7 +154,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
describe '#group_timebox?' do
context 'when group_id is present' do
- let(:timebox) { build(timebox_type, group: group) }
+ let(:timebox) { build(timebox_type, *timebox_args, group: group) }
it 'returns true' do
expect(timebox.group_timebox?).to be_truthy
@@ -168,7 +169,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
describe '#safe_title' do
- let(:timebox) { create(timebox_type, title: "<b>foo & bar -> 2.2</b>") }
+ let(:timebox) { create(timebox_type, *timebox_args, title: "<b>foo & bar -> 2.2</b>") }
it 'normalizes the title for use as a slug' do
expect(timebox.safe_title).to eq('foo-bar-22')
@@ -177,7 +178,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
describe '#resource_parent' do
context 'when group is present' do
- let(:timebox) { build(timebox_type, group: group) }
+ let(:timebox) { build(timebox_type, *timebox_args, group: group) }
it 'returns the group' do
expect(timebox.resource_parent).to eq(group)
@@ -192,7 +193,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
end
describe "#title" do
- let(:timebox) { create(timebox_type, title: "<b>foo & bar -> 2.2</b>") }
+ let(:timebox) { create(timebox_type, *timebox_args, title: "<b>foo & bar -> 2.2</b>") }
it "sanitizes title" do
expect(timebox.title).to eq("foo & bar -> 2.2")
@@ -203,28 +204,28 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
context "per project" do
it "is true for projects with MRs enabled" do
project = create(:project, :merge_requests_enabled)
- timebox = create(timebox_type, project: project)
+ timebox = create(timebox_type, *timebox_args, project: project)
expect(timebox.merge_requests_enabled?).to be_truthy
end
it "is false for projects with MRs disabled" do
project = create(:project, :repository_enabled, :merge_requests_disabled)
- timebox = create(timebox_type, project: project)
+ timebox = create(timebox_type, *timebox_args, project: project)
expect(timebox.merge_requests_enabled?).to be_falsey
end
it "is false for projects with repository disabled" do
project = create(:project, :repository_disabled)
- timebox = create(timebox_type, project: project)
+ timebox = create(timebox_type, *timebox_args, project: project)
expect(timebox.merge_requests_enabled?).to be_falsey
end
end
context "per group" do
- let(:timebox) { create(timebox_type, group: group) }
+ let(:timebox) { create(timebox_type, *timebox_args, group: group) }
it "is always true for groups, for performance reasons" do
expect(timebox.merge_requests_enabled?).to be_truthy
@@ -234,7 +235,7 @@ RSpec.shared_examples 'a timebox' do |timebox_type|
describe '#to_ability_name' do
it 'returns timebox' do
- timebox = build(timebox_type)
+ timebox = build(timebox_type, *timebox_args)
expect(timebox.to_ability_name).to eq(timebox_type.to_s)
end
diff --git a/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb b/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb
index 21ab9b06c33..13ffc1b7f87 100644
--- a/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb
+++ b/spec/support/shared_examples/models/issuable_hook_data_shared_examples.rb
@@ -38,6 +38,7 @@ RSpec.shared_examples 'issuable hook data' do |kind|
title_html: %w[foo bar]
}
end
+
let(:data) { builder.build(user: user, changes: changes) }
it 'populates the :changes hash' do
diff --git a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
index 99e62ebf422..e4668926d74 100644
--- a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
RSpec.shared_examples 'a class that supports relative positioning' do
- let(:item1) { create(factory, default_params) }
- let(:item2) { create(factory, default_params) }
- let(:new_item) { create(factory, default_params) }
+ let(:item1) { create_item }
+ let(:item2) { create_item }
+ let(:new_item) { create_item }
- def create_item(params)
+ def create_item(params = {})
create(factory, params.merge(default_params))
end
@@ -16,31 +16,119 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
describe '.move_nulls_to_end' do
+ let(:item3) { create_item }
+
it 'moves items with null relative_position to the end' do
+ item1.update!(relative_position: 1000)
+ item2.update!(relative_position: nil)
+ item3.update!(relative_position: nil)
+
+ items = [item1, item2, item3]
+ expect(described_class.move_nulls_to_end(items)).to be(2)
+
+ expect(items.sort_by(&:relative_position)).to eq(items)
+ expect(item1.relative_position).to be(1000)
+ expect(item1.prev_relative_position).to be_nil
+ expect(item1.next_relative_position).to eq(item2.relative_position)
+ expect(item2.next_relative_position).to eq(item3.relative_position)
+ expect(item3.next_relative_position).to be_nil
+ end
+
+ it 'preserves relative position' do
item1.update!(relative_position: nil)
item2.update!(relative_position: nil)
described_class.move_nulls_to_end([item1, item2])
- expect(item2.prev_relative_position).to eq item1.relative_position
- expect(item1.prev_relative_position).to eq nil
- expect(item2.next_relative_position).to eq nil
+ expect(item1.relative_position).to be < item2.relative_position
end
it 'moves the item near the start position when there are no existing positions' do
item1.update!(relative_position: nil)
described_class.move_nulls_to_end([item1])
-
- expect(item1.relative_position).to eq(described_class::START_POSITION + described_class::IDEAL_DISTANCE)
+ expect(item1.reset.relative_position).to eq(described_class::START_POSITION + described_class::IDEAL_DISTANCE)
end
it 'does not perform any moves if all items have their relative_position set' do
item1.update!(relative_position: 1)
- expect(item1).not_to receive(:save)
+ expect(described_class.move_nulls_to_end([item1])).to be(0)
+ expect(item1.reload.relative_position).to be(1)
+ end
+
+ it 'manages to move nulls to the end even if there is a sequence at the end' do
+ bunch = create_items_with_positions(run_at_end)
+ item1.update!(relative_position: nil)
described_class.move_nulls_to_end([item1])
+
+ items = [*bunch, item1]
+ items.each(&:reset)
+
+ expect(items.map(&:relative_position)).to all(be_valid_position)
+ expect(items.sort_by(&:relative_position)).to eq(items)
+ end
+
+ it 'does not have an N+1 issue' do
+ create_items_with_positions(10..12)
+
+ a, b, c, d, e, f = create_items_with_positions([nil, nil, nil, nil, nil, nil])
+
+ baseline = ActiveRecord::QueryRecorder.new do
+ described_class.move_nulls_to_end([a, e])
+ end
+
+ expect { described_class.move_nulls_to_end([b, c, d]) }
+ .not_to exceed_query_limit(baseline)
+
+ expect { described_class.move_nulls_to_end([f]) }
+ .not_to exceed_query_limit(baseline.count)
+ end
+ end
+
+ describe '.move_nulls_to_start' do
+ let(:item3) { create_item }
+
+ it 'moves items with null relative_position to the start' do
+ item1.update!(relative_position: nil)
+ item2.update!(relative_position: nil)
+ item3.update!(relative_position: 1000)
+
+ items = [item1, item2, item3]
+ expect(described_class.move_nulls_to_start(items)).to be(2)
+ items.map(&:reload)
+
+ expect(items.sort_by(&:relative_position)).to eq(items)
+ expect(item1.prev_relative_position).to eq nil
+ expect(item1.next_relative_position).to eq item2.relative_position
+ expect(item2.next_relative_position).to eq item3.relative_position
+ expect(item3.next_relative_position).to eq nil
+ expect(item3.relative_position).to be(1000)
+ end
+
+ it 'moves the item near the start position when there are no existing positions' do
+ item1.update!(relative_position: nil)
+
+ described_class.move_nulls_to_start([item1])
+
+ expect(item1.relative_position).to eq(described_class::START_POSITION - described_class::IDEAL_DISTANCE)
+ end
+
+ it 'preserves relative position' do
+ item1.update!(relative_position: nil)
+ item2.update!(relative_position: nil)
+
+ described_class.move_nulls_to_start([item1, item2])
+
+ expect(item1.relative_position).to be < item2.relative_position
+ end
+
+ it 'does not perform any moves if all items have their relative_position set' do
+ item1.update!(relative_position: 1)
+
+ expect(described_class.move_nulls_to_start([item1])).to be(0)
+ expect(item1.reload.relative_position).to be(1)
end
end
@@ -52,8 +140,8 @@ RSpec.shared_examples 'a class that supports relative positioning' do
describe '#prev_relative_position' do
it 'returns previous position if there is an item above' do
- item1.update(relative_position: 5)
- item2.update(relative_position: 15)
+ item1.update!(relative_position: 5)
+ item2.update!(relative_position: 15)
expect(item2.prev_relative_position).to eq item1.relative_position
end
@@ -65,8 +153,8 @@ RSpec.shared_examples 'a class that supports relative positioning' do
describe '#next_relative_position' do
it 'returns next position if there is an item below' do
- item1.update(relative_position: 5)
- item2.update(relative_position: 15)
+ item1.update!(relative_position: 5)
+ item2.update!(relative_position: 15)
expect(item1.next_relative_position).to eq item2.relative_position
end
@@ -76,9 +164,172 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
end
+ describe '#find_next_gap_before' do
+ context 'there is no gap' do
+ let(:items) { create_items_with_positions(run_at_start) }
+
+ it 'returns nil' do
+ items.each do |item|
+ expect(item.send(:find_next_gap_before)).to be_nil
+ end
+ end
+ end
+
+ context 'there is a sequence ending at MAX_POSITION' do
+ let(:items) { create_items_with_positions(run_at_end) }
+
+ let(:gaps) do
+ items.map { |item| item.send(:find_next_gap_before) }
+ end
+
+ it 'can find the gap at the start for any item in the sequence' do
+ gap = { start: items.first.relative_position, end: RelativePositioning::MIN_POSITION }
+
+ expect(gaps).to all(eq(gap))
+ end
+
+ it 'respects lower bounds' do
+ gap = { start: items.first.relative_position, end: 10 }
+ new_item.update!(relative_position: 10)
+
+ expect(gaps).to all(eq(gap))
+ end
+ end
+
+ specify do
+ item1.update!(relative_position: 5)
+
+ (0..10).each do |pos|
+ item2.update!(relative_position: pos)
+
+ gap = item2.send(:find_next_gap_before)
+
+ expect(gap[:start]).to be <= item2.relative_position
+ expect((gap[:end] - gap[:start]).abs).to be >= RelativePositioning::MIN_GAP
+ expect(gap[:start]).to be_valid_position
+ expect(gap[:end]).to be_valid_position
+ end
+ end
+
+ it 'deals with there not being any items to the left' do
+ create_items_with_positions([1, 2, 3])
+ new_item.update!(relative_position: 0)
+
+ expect(new_item.send(:find_next_gap_before)).to eq(start: 0, end: RelativePositioning::MIN_POSITION)
+ end
+
+ it 'finds the next gap to the left, skipping adjacent values' do
+ create_items_with_positions([1, 9, 10])
+ new_item.update!(relative_position: 11)
+
+ expect(new_item.send(:find_next_gap_before)).to eq(start: 9, end: 1)
+ end
+
+ it 'finds the next gap to the left' do
+ create_items_with_positions([2, 10])
+
+ new_item.update!(relative_position: 15)
+ expect(new_item.send(:find_next_gap_before)).to eq(start: 15, end: 10)
+
+ new_item.update!(relative_position: 11)
+ expect(new_item.send(:find_next_gap_before)).to eq(start: 10, end: 2)
+
+ new_item.update!(relative_position: 9)
+ expect(new_item.send(:find_next_gap_before)).to eq(start: 9, end: 2)
+
+ new_item.update!(relative_position: 5)
+ expect(new_item.send(:find_next_gap_before)).to eq(start: 5, end: 2)
+ end
+ end
+
+ describe '#find_next_gap_after' do
+ context 'there is no gap' do
+ let(:items) { create_items_with_positions(run_at_end) }
+
+ it 'returns nil' do
+ items.each do |item|
+ expect(item.send(:find_next_gap_after)).to be_nil
+ end
+ end
+ end
+
+ context 'there is a sequence starting at MIN_POSITION' do
+ let(:items) { create_items_with_positions(run_at_start) }
+
+ let(:gaps) do
+ items.map { |item| item.send(:find_next_gap_after) }
+ end
+
+ it 'can find the gap at the end for any item in the sequence' do
+ gap = { start: items.last.relative_position, end: RelativePositioning::MAX_POSITION }
+
+ expect(gaps).to all(eq(gap))
+ end
+
+ it 'respects upper bounds' do
+ gap = { start: items.last.relative_position, end: 10 }
+ new_item.update!(relative_position: 10)
+
+ expect(gaps).to all(eq(gap))
+ end
+ end
+
+ specify do
+ item1.update!(relative_position: 5)
+
+ (0..10).each do |pos|
+ item2.update!(relative_position: pos)
+
+ gap = item2.send(:find_next_gap_after)
+
+ expect(gap[:start]).to be >= item2.relative_position
+ expect((gap[:end] - gap[:start]).abs).to be >= RelativePositioning::MIN_GAP
+ expect(gap[:start]).to be_valid_position
+ expect(gap[:end]).to be_valid_position
+ end
+ end
+
+ it 'deals with there not being any items to the right' do
+ create_items_with_positions([1, 2, 3])
+ new_item.update!(relative_position: 5)
+
+ expect(new_item.send(:find_next_gap_after)).to eq(start: 5, end: RelativePositioning::MAX_POSITION)
+ end
+
+ it 'finds the next gap to the right, skipping adjacent values' do
+ create_items_with_positions([1, 2, 10])
+ new_item.update!(relative_position: 0)
+
+ expect(new_item.send(:find_next_gap_after)).to eq(start: 2, end: 10)
+ end
+
+ it 'finds the next gap to the right' do
+ create_items_with_positions([2, 10])
+
+ new_item.update!(relative_position: 0)
+ expect(new_item.send(:find_next_gap_after)).to eq(start: 0, end: 2)
+
+ new_item.update!(relative_position: 1)
+ expect(new_item.send(:find_next_gap_after)).to eq(start: 2, end: 10)
+
+ new_item.update!(relative_position: 3)
+ expect(new_item.send(:find_next_gap_after)).to eq(start: 3, end: 10)
+
+ new_item.update!(relative_position: 5)
+ expect(new_item.send(:find_next_gap_after)).to eq(start: 5, end: 10)
+ end
+ end
+
describe '#move_before' do
+ let(:item3) { create(factory, default_params) }
+
it 'moves item before' do
- [item2, item1].each(&:move_to_end)
+ [item2, item1].each do |item|
+ item.move_to_end
+ item.save!
+ end
+
+ expect(item1.relative_position).to be > item2.relative_position
item1.move_before(item2)
@@ -86,12 +337,10 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
context 'when there is no space' do
- let(:item3) { create(factory, default_params) }
-
before do
- item1.update(relative_position: 1000)
- item2.update(relative_position: 1001)
- item3.update(relative_position: 1002)
+ item1.update!(relative_position: 1000)
+ item2.update!(relative_position: 1001)
+ item3.update!(relative_position: 1002)
end
it 'moves items correctly' do
@@ -100,6 +349,73 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(item3.relative_position).to be_between(item1.reload.relative_position, item2.reload.relative_position).exclusive
end
end
+
+ it 'can move the item before an item at the start' do
+ item1.update!(relative_position: RelativePositioning::START_POSITION)
+
+ new_item.move_before(item1)
+
+ expect(new_item.relative_position).to be_valid_position
+ expect(new_item.relative_position).to be < item1.reload.relative_position
+ end
+
+ it 'can move the item before an item at MIN_POSITION' do
+ item1.update!(relative_position: RelativePositioning::MIN_POSITION)
+
+ new_item.move_before(item1)
+
+ expect(new_item.relative_position).to be >= RelativePositioning::MIN_POSITION
+ expect(new_item.relative_position).to be < item1.reload.relative_position
+ end
+
+ it 'can move the item before an item bunched up at MIN_POSITION' do
+ item1, item2, item3 = create_items_with_positions(run_at_start)
+
+ new_item.move_before(item3)
+ new_item.save!
+
+ items = [item1, item2, new_item, item3]
+
+ items.each do |item|
+ expect(item.reset.relative_position).to be_valid_position
+ end
+
+ expect(items.sort_by(&:relative_position)).to eq(items)
+ end
+
+ context 'leap-frogging to the left' do
+ before do
+ start = RelativePositioning::START_POSITION
+ item1.update!(relative_position: start - RelativePositioning::IDEAL_DISTANCE * 0)
+ item2.update!(relative_position: start - RelativePositioning::IDEAL_DISTANCE * 1)
+ item3.update!(relative_position: start - RelativePositioning::IDEAL_DISTANCE * 2)
+ end
+
+ let(:item3) { create(factory, default_params) }
+
+ def leap_frog(steps)
+ a = item1
+ b = item2
+
+ steps.times do |i|
+ a.move_before(b)
+ a.save!
+ a, b = b, a
+ end
+ end
+
+ it 'can leap-frog STEPS - 1 times before needing to rebalance' do
+ # This is less efficient than going right, due to the flooring of
+ # integer division
+ expect { leap_frog(RelativePositioning::STEPS - 1) }
+ .not_to change { item3.reload.relative_position }
+ end
+
+ it 'rebalances after leap-frogging STEPS times' do
+ expect { leap_frog(RelativePositioning::STEPS) }
+ .to change { item3.reload.relative_position }
+ end
+ end
end
describe '#move_after' do
@@ -115,9 +431,17 @@ RSpec.shared_examples 'a class that supports relative positioning' do
let(:item3) { create(factory, default_params) }
before do
- item1.update(relative_position: 1000)
- item2.update(relative_position: 1001)
- item3.update(relative_position: 1002)
+ item1.update!(relative_position: 1000)
+ item2.update!(relative_position: 1001)
+ item3.update!(relative_position: 1002)
+ end
+
+ it 'can move the item after an item at MAX_POSITION' do
+ item1.update!(relative_position: RelativePositioning::MAX_POSITION)
+
+ new_item.move_after(item1)
+ expect(new_item.relative_position).to be_valid_position
+ expect(new_item.relative_position).to be > item1.reset.relative_position
end
it 'moves items correctly' do
@@ -126,12 +450,96 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(item1.relative_position).to be_between(item2.reload.relative_position, item3.reload.relative_position).exclusive
end
end
+
+ it 'can move the item after an item bunched up at MAX_POSITION' do
+ item1, item2, item3 = create_items_with_positions(run_at_end)
+
+ new_item.move_after(item1)
+ new_item.save!
+
+ items = [item1, new_item, item2, item3]
+
+ items.each do |item|
+ expect(item.reset.relative_position).to be_valid_position
+ end
+
+ expect(items.sort_by(&:relative_position)).to eq(items)
+ end
+
+ context 'leap-frogging' do
+ before do
+ start = RelativePositioning::START_POSITION
+ item1.update!(relative_position: start + RelativePositioning::IDEAL_DISTANCE * 0)
+ item2.update!(relative_position: start + RelativePositioning::IDEAL_DISTANCE * 1)
+ item3.update!(relative_position: start + RelativePositioning::IDEAL_DISTANCE * 2)
+ end
+
+ let(:item3) { create(factory, default_params) }
+
+ def leap_frog(steps)
+ a = item1
+ b = item2
+
+ steps.times do
+ a.move_after(b)
+ a.save!
+ a, b = b, a
+ end
+ end
+
+ it 'can leap-frog STEPS times before needing to rebalance' do
+ expect { leap_frog(RelativePositioning::STEPS) }
+ .not_to change { item3.reload.relative_position }
+ end
+
+ it 'rebalances after leap-frogging STEPS+1 times' do
+ expect { leap_frog(RelativePositioning::STEPS + 1) }
+ .to change { item3.reload.relative_position }
+ end
+ end
+ end
+
+ describe '#move_to_start' do
+ before do
+ [item1, item2].each do |item|
+ item.move_to_start && item.save!
+ end
+ end
+
+ it 'moves item to the start' do
+ new_item.move_to_start
+
+ expect(new_item.relative_position).to be < item2.relative_position
+ end
+
+ it 'rebalances when there is already an item at the MIN_POSITION' do
+ item2.update!(relative_position: RelativePositioning::MIN_POSITION)
+
+ new_item.move_to_start
+ item2.reset
+
+ expect(new_item.relative_position).to be < item2.relative_position
+ expect(new_item.relative_position).to be >= RelativePositioning::MIN_POSITION
+ end
+
+ it 'deals with a run of elements at the start' do
+ item1.update!(relative_position: RelativePositioning::MIN_POSITION + 1)
+ item2.update!(relative_position: RelativePositioning::MIN_POSITION)
+
+ new_item.move_to_start
+ item1.reset
+ item2.reset
+
+ expect(item2.relative_position).to be < item1.relative_position
+ expect(new_item.relative_position).to be < item2.relative_position
+ expect(new_item.relative_position).to be >= RelativePositioning::MIN_POSITION
+ end
end
describe '#move_to_end' do
before do
[item1, item2].each do |item1|
- item1.move_to_end && item1.save
+ item1.move_to_end && item1.save!
end
end
@@ -140,12 +548,44 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(new_item.relative_position).to be > item2.relative_position
end
+
+ it 'rebalances when there is already an item at the MAX_POSITION' do
+ item2.update!(relative_position: RelativePositioning::MAX_POSITION)
+
+ new_item.move_to_end
+ item2.reset
+
+ expect(new_item.relative_position).to be > item2.relative_position
+ expect(new_item.relative_position).to be <= RelativePositioning::MAX_POSITION
+ end
+
+ it 'deals with a run of elements at the end' do
+ item1.update!(relative_position: RelativePositioning::MAX_POSITION - 1)
+ item2.update!(relative_position: RelativePositioning::MAX_POSITION)
+
+ new_item.move_to_end
+ item1.reset
+ item2.reset
+
+ expect(item2.relative_position).to be > item1.relative_position
+ expect(new_item.relative_position).to be > item2.relative_position
+ expect(new_item.relative_position).to be <= RelativePositioning::MAX_POSITION
+ end
end
describe '#move_between' do
before do
- [item1, item2].each do |item1|
- item1.move_to_end && item1.save
+ [item1, item2].each do |item|
+ item.move_to_end && item.save!
+ end
+ end
+
+ shared_examples 'moves item between' do
+ it 'moves the middle item between left and right' do
+ expect do
+ middle.move_between(left, right)
+ middle.save!
+ end.to change { between_exclusive?(left, middle, right) }.from(false).to(true)
end
end
@@ -169,26 +609,26 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
it 'positions items even when after and before positions are the same' do
- item2.update relative_position: item1.relative_position
+ item2.update! relative_position: item1.relative_position
new_item.move_between(item1, item2)
+ [item1, item2].each(&:reset)
expect(new_item.relative_position).to be > item1.relative_position
expect(item1.relative_position).to be < item2.relative_position
end
- it 'positions items between other two if distance is 1' do
- item2.update relative_position: item1.relative_position + 1
-
- new_item.move_between(item1, item2)
+ context 'the two items are next to each other' do
+ let(:left) { item1 }
+ let(:middle) { new_item }
+ let(:right) { create_item(relative_position: item1.relative_position + 1) }
- expect(new_item.relative_position).to be > item1.relative_position
- expect(item1.relative_position).to be < item2.relative_position
+ it_behaves_like 'moves item between'
end
it 'positions item in the middle of other two if distance is big enough' do
- item1.update relative_position: 6000
- item2.update relative_position: 10000
+ item1.update! relative_position: 6000
+ item2.update! relative_position: 10000
new_item.move_between(item1, item2)
@@ -196,7 +636,8 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
it 'positions item closer to the middle if we are at the very top' do
- item2.update relative_position: 6000
+ item1.update!(relative_position: 6001)
+ item2.update!(relative_position: 6000)
new_item.move_between(nil, item2)
@@ -204,51 +645,53 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
it 'positions item closer to the middle if we are at the very bottom' do
- new_item.update relative_position: 1
- item1.update relative_position: 6000
- item2.destroy
+ new_item.update!(relative_position: 1)
+ item1.update!(relative_position: 6000)
+ item2.update!(relative_position: 5999)
new_item.move_between(item1, nil)
expect(new_item.relative_position).to eq(6000 + RelativePositioning::IDEAL_DISTANCE)
end
- it 'positions item in the middle of other two if distance is not big enough' do
- item1.update relative_position: 100
- item2.update relative_position: 400
+ it 'positions item in the middle of other two' do
+ item1.update! relative_position: 100
+ item2.update! relative_position: 400
new_item.move_between(item1, item2)
expect(new_item.relative_position).to eq(250)
end
- it 'positions item in the middle of other two is there is no place' do
- item1.update relative_position: 100
- item2.update relative_position: 101
+ context 'there is no space' do
+ let(:middle) { new_item }
+ let(:left) { create_item(relative_position: 100) }
+ let(:right) { create_item(relative_position: 101) }
- new_item.move_between(item1, item2)
-
- expect(new_item.relative_position).to be_between(item1.relative_position, item2.relative_position).exclusive
+ it_behaves_like 'moves item between'
end
- it 'uses rebalancing if there is no place' do
- item1.update relative_position: 100
- item2.update relative_position: 101
- item3 = create_item(relative_position: 102)
- new_item.update relative_position: 103
+ context 'there is a bunch of items' do
+ let(:items) { create_items_with_positions(100..104) }
+ let(:left) { items[1] }
+ let(:middle) { items[3] }
+ let(:right) { items[2] }
- new_item.move_between(item2, item3)
- new_item.save!
+ it_behaves_like 'moves item between'
+
+ it 'handles bunches correctly' do
+ middle.move_between(left, right)
+ middle.save!
- expect(new_item.relative_position).to be_between(item2.relative_position, item3.relative_position).exclusive
- expect(item1.reload.relative_position).not_to eq(100)
+ expect(items.first.reset.relative_position).to be < middle.relative_position
+ end
end
- it 'positions item right if we pass none-sequential parameters' do
- item1.update relative_position: 99
- item2.update relative_position: 101
+ it 'positions item right if we pass non-sequential parameters' do
+ item1.update! relative_position: 99
+ item2.update! relative_position: 101
item3 = create_item(relative_position: 102)
- new_item.update relative_position: 103
+ new_item.update! relative_position: 103
new_item.move_between(item1, item3)
new_item.save!
@@ -280,6 +723,12 @@ RSpec.shared_examples 'a class that supports relative positioning' do
expect(positions).to eq([90, 95, 96, 102])
end
+ it 'raises an error if there is no space' do
+ items = create_items_with_positions(run_at_start)
+
+ expect { items.last.move_sequence_before }.to raise_error(RelativePositioning::NoSpaceLeft)
+ end
+
it 'finds a gap if there are unused positions' do
items = create_items_with_positions([100, 101, 102])
@@ -287,7 +736,8 @@ RSpec.shared_examples 'a class that supports relative positioning' do
items.last.save!
positions = items.map { |item| item.reload.relative_position }
- expect(positions).to eq([50, 51, 102])
+
+ expect(positions.last - positions.second).to be > RelativePositioning::MIN_GAP
end
end
@@ -309,7 +759,33 @@ RSpec.shared_examples 'a class that supports relative positioning' do
items.first.save!
positions = items.map { |item| item.reload.relative_position }
- expect(positions).to eq([100, 601, 602])
+ expect(positions.second - positions.first).to be > RelativePositioning::MIN_GAP
end
+
+ it 'raises an error if there is no space' do
+ items = create_items_with_positions(run_at_end)
+
+ expect { items.first.move_sequence_after }.to raise_error(RelativePositioning::NoSpaceLeft)
+ end
+ end
+
+ def be_valid_position
+ be_between(RelativePositioning::MIN_POSITION, RelativePositioning::MAX_POSITION)
+ end
+
+ def between_exclusive?(left, middle, right)
+ a, b, c = [left, middle, right].map { |item| item.reset.relative_position }
+ return false if a.nil? || b.nil?
+ return a < b if c.nil?
+
+ a < b && b < c
+ end
+
+ def run_at_end(size = 3)
+ (RelativePositioning::MAX_POSITION - size)..RelativePositioning::MAX_POSITION
+ end
+
+ def run_at_start(size = 3)
+ (RelativePositioning::MIN_POSITION..).take(size)
end
end
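
The examples above all lean on one rule: `#move_between` places an item at the integer midpoint of its two neighbours, and once the neighbours are adjacent there is no midpoint left, so the surrounding rows have to be shifted or rebalanced first. A rough standalone sketch of that midpoint rule (illustrative only; the Struct and method below are not GitLab's RelativePositioning module):

Item = Struct.new(:relative_position)

# Midpoint insertion: put `item` halfway between its neighbours, or signal
# that the caller must make room (shift or rebalance) before trying again.
def move_between(item, left, right)
  gap = right.relative_position - left.relative_position
  raise 'no space left, make room first' if gap < 2

  item.relative_position = left.relative_position + gap / 2
end

item1    = Item.new(100)
item2    = Item.new(400)
new_item = Item.new(nil)

move_between(new_item, item1, item2)
new_item.relative_position # => 250, as asserted in the "positions item in the middle" example
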
diff --git a/spec/support/shared_examples/resource_events.rb b/spec/support/shared_examples/models/resource_event_shared_examples.rb
index c0158f9b24b..c0158f9b24b 100644
--- a/spec/support/shared_examples/resource_events.rb
+++ b/spec/support/shared_examples/models/resource_event_shared_examples.rb
diff --git a/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb b/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
new file mode 100644
index 00000000000..07552b62cdd
--- /dev/null
+++ b/spec/support/shared_examples/models/resource_timebox_event_shared_examples.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'timebox resource event validations' do
+ describe 'validations' do
+ context 'when issue and merge_request are both nil' do
+ subject { build(described_class.name.underscore.to_sym, issue: nil, merge_request: nil) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when issue and merge_request are both set' do
+ subject { build(described_class.name.underscore.to_sym, issue: build(:issue), merge_request: build(:merge_request)) }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when issue is set' do
+ subject { create(described_class.name.underscore.to_sym, issue: create(:issue), merge_request: nil) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when merge_request is set' do
+ subject { create(described_class.name.underscore.to_sym, issue: nil, merge_request: create(:merge_request)) }
+
+ it { is_expected.to be_valid }
+ end
+ end
+end
+
+RSpec.shared_examples 'timebox resource event states' do
+ describe 'states' do
+ [Issue, MergeRequest].each do |klass|
+ klass.available_states.each do |state|
+ it "supports state #{state.first} for #{klass.name.underscore}" do
+ model = create(klass.name.underscore, state: state[0])
+ key = model.class.name.underscore
+ event = build(described_class.name.underscore.to_sym, key => model, state: model.state)
+
+ expect(event.state).to eq(state[0])
+ end
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'queryable timebox action resource event' do |expected_results_for_actions|
+ [Issue, MergeRequest].each do |klass|
+ expected_results_for_actions.each do |action, expected_result|
+ it "is #{expected_result} for action #{action} on #{klass.name.underscore}" do
+ model = build(klass.name.underscore)
+ key = model.class.name.underscore
+ event = build(described_class.name.underscore.to_sym, key => model, action: action)
+
+ expect(event.send(query_method)).to eq(expected_result)
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'timebox resource event actions' do
+ describe '#add?' do
+ it_behaves_like 'queryable timebox action resource event', { add: true, remove: false } do
+ let(:query_method) { :add? }
+ end
+ end
+
+ describe '#remove?' do
+ it_behaves_like 'queryable timebox action resource event', { add: false, remove: true } do
+ let(:query_method) { :remove? }
+ end
+ end
+end
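
These shared examples only assume a factory named after `described_class`; a hypothetical consumer spec (the model name is an assumption for illustration, not part of this diff) would include them like so:

# frozen_string_literal: true

require 'spec_helper'

# Hypothetical consumer of the shared examples above.
RSpec.describe ResourceMilestoneEvent do
  it_behaves_like 'timebox resource event validations'
  it_behaves_like 'timebox resource event states'
  it_behaves_like 'timebox resource event actions'
end
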
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index 7d70df82ec7..7f0da19996e 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -17,11 +17,14 @@ RSpec.shared_examples 'UpdateProjectStatistics' do
context 'when creating' do
it 'updates the project statistics' do
- delta = read_attribute
+ delta0 = reload_stat
- expect { subject.save! }
- .to change { reload_stat }
- .by(delta)
+ subject.save!
+
+ delta1 = reload_stat
+
+ expect(delta1).to eq(delta0 + read_attribute)
+ expect(delta1).to be > delta0
end
it 'schedules a namespace statistics worker' do
@@ -80,15 +83,14 @@ RSpec.shared_examples 'UpdateProjectStatistics' do
end
it 'updates the project statistics' do
- delta = -read_attribute
+ delta0 = reload_stat
- expect(ProjectStatistics)
- .to receive(:increment_statistic)
- .and_call_original
+ subject.destroy!
- expect { subject.destroy! }
- .to change { reload_stat }
- .by(delta)
+ delta1 = reload_stat
+
+ expect(delta1).to eq(delta0 - read_attribute)
+ expect(delta1).to be < delta0
end
it 'schedules a namespace statistics worker' do
diff --git a/spec/support/shared_examples/path_extraction_shared_examples.rb b/spec/support/shared_examples/path_extraction_shared_examples.rb
index 19c6f2404e5..ff55bc9a490 100644
--- a/spec/support/shared_examples/path_extraction_shared_examples.rb
+++ b/spec/support/shared_examples/path_extraction_shared_examples.rb
@@ -88,9 +88,16 @@ RSpec.shared_examples 'extracts refs' do
expect(extract_ref('stable')).to eq(['stable', ''])
end
- it 'extracts the longest matching ref' do
- expect(extract_ref('release/app/v1.0.0/README.md')).to eq(
- ['release/app/v1.0.0', 'README.md'])
+ it 'does not fetch ref names when there is no slash' do
+ expect(self).not_to receive(:ref_names)
+
+ extract_ref('master')
+ end
+
+ it 'fetches ref names when there is a slash' do
+ expect(self).to receive(:ref_names).and_call_original
+
+ extract_ref('release/app/v1.0.0')
end
end
@@ -113,6 +120,61 @@ RSpec.shared_examples 'extracts refs' do
it 'falls back to a primitive split for an invalid ref' do
expect(extract_ref('stable/CHANGELOG')).to eq(%w(stable CHANGELOG))
end
+
+ it 'extracts the longest matching ref' do
+ expect(extract_ref('release/app/v1.0.0/README.md')).to eq(
+ ['release/app/v1.0.0', 'README.md'])
+ end
+
+ context 'when the repository does not have ambiguous refs' do
+ before do
+ allow(container.repository).to receive(:has_ambiguous_refs?).and_return(false)
+ end
+
+ it 'does not fetch all ref names when the first path component is a ref' do
+ expect(self).not_to receive(:ref_names)
+ expect(container.repository).to receive(:branch_names_include?).with('v1.0.0').and_return(false)
+ expect(container.repository).to receive(:tag_names_include?).with('v1.0.0').and_return(true)
+
+ expect(extract_ref('v1.0.0/doc/README.md')).to eq(['v1.0.0', 'doc/README.md'])
+ end
+
+ it 'fetches all ref names when the first path component is not a ref' do
+ expect(self).to receive(:ref_names).and_call_original
+ expect(container.repository).to receive(:branch_names_include?).with('release').and_return(false)
+ expect(container.repository).to receive(:tag_names_include?).with('release').and_return(false)
+
+ expect(extract_ref('release/app/doc/README.md')).to eq(['release/app', 'doc/README.md'])
+ end
+
+ context 'when the extracts_path_optimization feature flag is disabled' do
+ before do
+ stub_feature_flags(extracts_path_optimization: false)
+ end
+
+ it 'always fetches all ref names' do
+ expect(self).to receive(:ref_names).and_call_original
+ expect(container.repository).not_to receive(:branch_names_include?)
+ expect(container.repository).not_to receive(:tag_names_include?)
+
+ expect(extract_ref('v1.0.0/doc/README.md')).to eq(['v1.0.0', 'doc/README.md'])
+ end
+ end
+ end
+
+ context 'when the repository has ambiguous refs' do
+ before do
+ allow(container.repository).to receive(:has_ambiguous_refs?).and_return(true)
+ end
+
+ it 'always fetches all ref names' do
+ expect(self).to receive(:ref_names).and_call_original
+ expect(container.repository).not_to receive(:branch_names_include?)
+ expect(container.repository).not_to receive(:tag_names_include?)
+
+ expect(extract_ref('v1.0.0/doc/README.md')).to eq(['v1.0.0', 'doc/README.md'])
+ end
+ end
end
end
end
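
The new assertions come down to `extract_ref` preferring the longest ref name that prefixes the path, and only fetching all ref names when a cheaper check cannot decide. Stripped of caching and feature flags, the longest-match rule looks roughly like this (a toy sketch, not the ExtractsRef implementation):

ref_names = ['release/app', 'release/app/v1.0.0']
path      = 'release/app/v1.0.0/README.md'

# Keep the longest known ref that prefixes the path; the remainder is the file path.
ref  = ref_names.select { |name| path.start_with?("#{name}/") }.max_by(&:length)
file = path.delete_prefix("#{ref}/")

[ref, file] # => ["release/app/v1.0.0", "README.md"], matching the example above
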
diff --git a/spec/support/shared_examples/policies/project_policy_shared_examples.rb b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
index df8e4bc96dd..d8476f5dcc2 100644
--- a/spec/support/shared_examples/policies/project_policy_shared_examples.rb
+++ b/spec/support/shared_examples/policies/project_policy_shared_examples.rb
@@ -2,24 +2,13 @@
RSpec.shared_examples 'archived project policies' do
let(:feature_write_abilities) do
- described_class::READONLY_FEATURES_WHEN_ARCHIVED.flat_map do |feature|
+ described_class.readonly_features.flat_map do |feature|
described_class.create_update_admin_destroy(feature)
end + additional_maintainer_permissions
end
let(:other_write_abilities) do
- %i[
- create_merge_request_in
- create_merge_request_from
- push_to_delete_protected_branch
- push_code
- request_access
- upload_file
- resolve_note
- award_emoji
- admin_tag
- admin_issue_link
- ]
+ described_class.readonly_abilities
end
context 'when the project is archived' do
diff --git a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
index 5257980d7df..09743c20fba 100644
--- a/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/composer_packages_shared_examples.rb
@@ -7,13 +7,17 @@ RSpec.shared_context 'Composer user type' do |user_type, add_member|
end
end
-RSpec.shared_examples 'Composer package index' do |user_type, status, add_member = true|
+RSpec.shared_examples 'Composer package index' do |user_type, status, add_member, include_package|
include_context 'Composer user type', user_type, add_member do
+ let(:expected_packages) { include_package == :include_package ? [package] : [] }
+ let(:presenter) { ::Packages::Composer::PackagesPresenter.new(group, expected_packages) }
+
it 'returns the package index' do
subject
expect(response).to have_gitlab_http_status(status)
expect(response).to match_response_schema('public_api/v4/packages/composer/index')
+ expect(json_response).to eq presenter.root
end
end
end
@@ -68,7 +72,7 @@ RSpec.shared_examples 'Composer package creation' do |user_type, status, add_mem
expect(response).to have_gitlab_http_status(status)
end
- it_behaves_like 'a gitlab tracking event', described_class.name, 'register_package'
+ it_behaves_like 'a gitlab tracking event', described_class.name, 'push_package'
end
end
@@ -85,7 +89,7 @@ end
RSpec.shared_context 'Composer auth headers' do |user_role, user_token|
let(:token) { user_token ? personal_access_token.token : 'wrong' }
- let(:headers) { user_role == :anonymous ? {} : build_basic_auth_header(user.username, token) }
+ let(:headers) { user_role == :anonymous ? {} : basic_auth_header(user.username, token) }
end
RSpec.shared_context 'Composer api project access' do |project_visibility_level, user_role, user_token|
@@ -114,7 +118,7 @@ RSpec.shared_examples 'rejects Composer access with unknown group id' do
end
context 'as authenticated user' do
- subject { get api(url), headers: build_basic_auth_header(user.username, personal_access_token.token) }
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :anonymous, :not_found
end
@@ -130,7 +134,7 @@ RSpec.shared_examples 'rejects Composer access with unknown project id' do
end
context 'as authenticated user' do
- subject { get api(url), headers: build_basic_auth_header(user.username, personal_access_token.token) }
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process Composer api request', :anonymous, :not_found
end
diff --git a/spec/support/shared_examples/requests/api/graphql/mutations/subscription_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/mutations/subscription_shared_examples.rb
new file mode 100644
index 00000000000..40b88ef370f
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/mutations/subscription_shared_examples.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'a subscribable resource api' do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let(:project) { resource.project }
+ let(:input) { { subscribed_state: true } }
+ let(:resource_ref) { resource.class.name.camelize(:lower) }
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: resource.iid.to_s
+ }
+
+ graphql_mutation(
+ mutation_name,
+ variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ #{resource_ref} {
+ id
+ subscribed
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(mutation_name)[resource_ref]['subscribed']
+ end
+
+ context 'when the user is not authorized' do
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["The resource that you are attempting to access "\
+ "does not exist or you don't have permission to "\
+ "perform this action"]
+ end
+
+ context 'when user is authorized' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ it 'marks the resource as subscribed' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(true)
+ end
+
+ context 'when passing subscribe false as input' do
+ let(:input) { { subscribed_state: false } }
+
+ it 'unmarks the resource as subscribed' do
+ resource.subscribe(current_user, project)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response).to eq(false)
+ end
+ end
+ end
+end
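
The shared example leaves `resource` and `mutation_name` to the including spec; a hypothetical request spec wiring it up could look like this (the factory and mutation name are assumptions for illustration):

# frozen_string_literal: true

require 'spec_helper'

# Hypothetical consumer of 'a subscribable resource api'.
RSpec.describe 'Setting an issue subscription' do
  let_it_be(:resource) { create(:issue) }
  let(:mutation_name) { :issue_set_subscription }

  it_behaves_like 'a subscribable resource api'
end
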
diff --git a/spec/support/shared_examples/requests/api/milestones_shared_examples.rb b/spec/support/shared_examples/requests/api/milestones_shared_examples.rb
index 77b49b7caef..249a7b7cdac 100644
--- a/spec/support/shared_examples/requests/api/milestones_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/milestones_shared_examples.rb
@@ -266,6 +266,7 @@ RSpec.shared_examples 'group and project milestones' do |route_definition|
let!(:milestone) do
context_group ? create(:milestone, group: context_group) : create(:milestone, project: public_project)
end
+
let!(:issue) { create(:issue, project: public_project) }
let!(:confidential_issue) { create(:issue, confidential: true, project: public_project) }
let!(:issues_route) do
diff --git a/spec/support/shared_examples/requests/api/notes_shared_examples.rb b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
index a34c48a5ba4..7066f803f9d 100644
--- a/spec/support/shared_examples/requests/api/notes_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
@@ -158,9 +158,11 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
end
it "creates an activity event when a note is created", :sidekiq_might_not_need_inline do
- expect(Event).to receive(:create!)
+ uri = "/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes"
- post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: { body: 'hi!' }
+ expect do
+ post api(uri, user), params: { body: 'hi!' }
+ end.to change(Event, :count).by(1)
end
context 'setting created_at' do
@@ -275,12 +277,53 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
end
describe "PUT /#{parent_type}/:id/#{noteable_type}/:noteable_id/notes/:note_id" do
- it 'returns modified note' do
- put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
- "notes/#{note.id}", user), params: { body: 'Hello!' }
+ let(:params) { { body: 'Hello!', confidential: false } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['body']).to eq('Hello!')
+ subject do
+ put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes/#{note.id}", user), params: params
+ end
+
+ context 'when everything is ok' do
+ before do
+ note.update!(confidential: true)
+ end
+
+ context 'with multiple params present' do
+ before do
+ subject
+ end
+
+ it 'returns modified note' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['body']).to eq('Hello!')
+ expect(json_response['confidential']).to be_falsey
+ end
+
+ it 'updates the note' do
+ expect(note.reload.note).to eq('Hello!')
+ expect(note.confidential).to be_falsey
+ end
+ end
+
+ context 'when only body param is present' do
+ let(:params) { { body: 'Hello!' } }
+
+ it 'updates only the note text' do
+ expect { subject }.not_to change { note.reload.confidential }
+
+ expect(note.note).to eq('Hello!')
+ end
+ end
+
+ context 'when only confidential param is present' do
+ let(:params) { { confidential: false } }
+
+ it 'updates only the note confidentiality' do
+ expect { subject }.not_to change { note.reload.note }
+
+ expect(note.confidential).to be_falsey
+ end
+ end
end
it 'returns a 404 error when note id not found' do
@@ -290,9 +333,9 @@ RSpec.shared_examples 'noteable API' do |parent_type, noteable_type, id_name|
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'returns a 400 bad request error if body not given' do
+ it 'returns a 400 bad request error if body is empty' do
put api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/"\
- "notes/#{note.id}", user)
+ "notes/#{note.id}", user), params: { body: '' }
expect(response).to have_gitlab_http_status(:bad_request)
end
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index 8d8483cae72..fcdc594f258 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -122,7 +122,7 @@ RSpec.shared_examples 'process nuget workhorse authorization' do |user_type, sta
context 'with a request that bypassed gitlab-workhorse' do
let(:headers) do
- build_basic_auth_header(user.username, personal_access_token.token)
+ basic_auth_header(user.username, personal_access_token.token)
.merge(workhorse_header)
.tap { |h| h.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER) }
end
@@ -180,6 +180,7 @@ RSpec.shared_examples 'process nuget upload' do |user_type, status, add_member =
body: 'content'
)
end
+
let(:fog_file) { fog_to_uploaded_file(tmp_object) }
let(:params) { { package: fog_file, 'package.remote_id' => file_name } }
@@ -400,7 +401,7 @@ RSpec.shared_examples 'rejects nuget access with unknown project id' do
end
context 'as authenticated user' do
- subject { get api(url), headers: build_basic_auth_header(user.username, personal_access_token.token) }
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'rejects nuget packages access', :anonymous, :not_found
end
diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
index ec15d7a4d2e..6f4a0236b66 100644
--- a/spec/support/shared_examples/requests/api/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'deploy token for package GET requests' do
context 'with deploy token headers' do
- let(:headers) { build_basic_auth_header(deploy_token.username, deploy_token.token) }
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token) }
subject { get api(url), headers: headers }
@@ -15,7 +15,7 @@ RSpec.shared_examples 'deploy token for package GET requests' do
end
context 'invalid token' do
- let(:headers) { build_basic_auth_header(deploy_token.username, 'bar') }
+ let(:headers) { basic_auth_header(deploy_token.username, 'bar') }
it_behaves_like 'returning response status', :unauthorized
end
@@ -24,7 +24,7 @@ end
RSpec.shared_examples 'deploy token for package uploads' do
context 'with deploy token headers' do
- let(:headers) { build_basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_header) }
+ let(:headers) { basic_auth_header(deploy_token.username, deploy_token.token).merge(workhorse_header) }
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
@@ -35,7 +35,7 @@ RSpec.shared_examples 'deploy token for package uploads' do
end
context 'invalid token' do
- let(:headers) { build_basic_auth_header(deploy_token.username, 'bar').merge(workhorse_header) }
+ let(:headers) { basic_auth_header(deploy_token.username, 'bar').merge(workhorse_header) }
it_behaves_like 'returning response status', :unauthorized
end
diff --git a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
index fcc166ac87d..4954151b93b 100644
--- a/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/pypi_packages_shared_examples.rb
@@ -24,6 +24,20 @@ RSpec.shared_examples 'PyPi package creation' do |user_type, status, add_member
it_behaves_like 'creating pypi package files'
+ context 'with a pre-existing file' do
+ it 'rejects the duplicated file' do
+ existing_package = create(:pypi_package, name: base_params[:name], version: base_params[:version], project: project)
+ create(:package_file, :pypi, package: existing_package, file_name: params[:content].original_filename)
+
+ expect { subject }
+ .to change { project.packages.pypi.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ .and change { Packages::Pypi::Metadatum.count }.by(0)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
context 'with object storage disabled' do
before do
stub_package_file_object_storage(enabled: false)
@@ -49,6 +63,7 @@ RSpec.shared_examples 'PyPi package creation' do |user_type, status, add_member
body: 'content'
)
end
+
let(:fog_file) { fog_to_uploaded_file(tmp_object) }
let(:params) { base_params.merge(content: fog_file, 'content.remote_id' => file_name) }
@@ -144,7 +159,7 @@ RSpec.shared_examples 'rejects PyPI access with unknown project id' do
end
context 'as authenticated user' do
- subject { get api(url), headers: build_basic_auth_header(user.username, personal_access_token.token) }
+ subject { get api(url), headers: basic_auth_header(user.username, personal_access_token.token) }
it_behaves_like 'process PyPi api request', :anonymous, :not_found
end
diff --git a/spec/support/shared_examples/requests/snippet_shared_examples.rb b/spec/support/shared_examples/requests/snippet_shared_examples.rb
index 644abb191a6..a17163328f4 100644
--- a/spec/support/shared_examples/requests/snippet_shared_examples.rb
+++ b/spec/support/shared_examples/requests/snippet_shared_examples.rb
@@ -106,3 +106,80 @@ RSpec.shared_examples 'snippet_multiple_files feature disabled' do
expect(json_response).not_to have_key('files')
end
end
+
+RSpec.shared_examples 'snippet creation with files parameter' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:path, :content, :status, :error) do
+ '.gitattributes' | 'file content' | :created | nil
+ 'valid/path/file.rb' | 'file content' | :created | nil
+
+ '.gitattributes' | nil | :bad_request | 'files[0][content] is empty'
+ '.gitattributes' | '' | :bad_request | 'files[0][content] is empty'
+
+ '' | 'file content' | :bad_request | 'files[0][file_path] is empty'
+ nil | 'file content' | :bad_request | 'files[0][file_path] should be a valid file path, files[0][file_path] is empty'
+ '../../etc/passwd' | 'file content' | :bad_request | 'files[0][file_path] should be a valid file path'
+ end
+
+ with_them do
+ let(:file_path) { path }
+ let(:file_content) { content }
+
+ before do
+ subject
+ end
+
+ it 'responds correctly' do
+ expect(response).to have_gitlab_http_status(status)
+ expect(json_response['error']).to eq(error)
+ end
+ end
+
+ it 'returns 400 if both files and content are provided' do
+ params[:file_name] = 'foo.rb'
+ params[:content] = 'bar'
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'files, content are mutually exclusive'
+ end
+
+ it 'returns 400 when neither files nor content is provided' do
+ params.delete(:files)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'files, content are missing, exactly one parameter must be provided'
+ end
+end
+
+RSpec.shared_examples 'snippet creation without files parameter' do
+ let(:file_params) { { file_name: 'testing.rb', content: 'snippet content' } }
+
+ it 'allows file_name and content parameters' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+
+ it 'returns 400 if file_name and content are not both provided' do
+ params.delete(:file_name)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'file_name is missing'
+ end
+
+ it 'returns 400 if content is blank' do
+ params[:content] = ''
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'content is empty'
+ end
+end
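
The `where` / `with_them` table above is the rspec-parameterized table syntax: every row becomes its own example, with the column values exposed as `let`-style accessors. A minimal sketch of the pattern with made-up values (assuming the rspec-parameterized gem is loaded):

RSpec.describe 'a table-driven example' do
  using RSpec::Parameterized::TableSyntax

  # Each row binds :a, :b and :sum for one generated example.
  where(:a, :b, :sum) do
    1 | 2 | 3
    2 | 2 | 4
  end

  with_them do
    it { expect(a + b).to eq(sum) }
  end
end
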
diff --git a/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
index 756c4136059..06e2b715e6d 100644
--- a/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/issues_list_service_shared_examples.rb
@@ -19,6 +19,15 @@ RSpec.shared_examples 'issues list service' do
end
end
+ it 'avoids N+1' do
+ params = { board_id: board.id }
+ control = ActiveRecord::QueryRecorder.new { described_class.new(parent, user, params).execute }
+
+ create(:list, board: board)
+
+ expect { described_class.new(parent, user, params).execute }.not_to exceed_query_limit(control)
+ end
+
context 'issues are ordered by priority' do
it 'returns opened issues when list_id is missing' do
params = { board_id: board.id }
@@ -71,4 +80,17 @@ RSpec.shared_examples 'issues list service' do
expect { service.execute }.to raise_error(ActiveRecord::RecordNotFound)
end
end
+
+ context 'when :all_lists is used' do
+ it 'returns issues from all lists' do
+ params = { board_id: board.id, all_lists: true }
+
+ issues = described_class.new(parent, user, params).execute
+
+ expected = [opened_issue2, reopened_issue1, opened_issue1, list1_issue1,
+ list1_issue2, list1_issue3, list2_issue1, closed_issue1,
+ closed_issue2, closed_issue3, closed_issue4, closed_issue5]
+ expect(issues).to match_array(expected)
+ end
+ end
end
diff --git a/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
index 07a6353296d..41fd286682e 100644
--- a/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
@@ -26,4 +26,22 @@ RSpec.shared_examples 'lists list service' do
expect(service.execute(board)).to eq [board.backlog_list, list, board.closed_list]
end
end
+
+ context 'when wanting a specific list' do
+ let!(:list1) { create(:list, board: board) }
+
+ it 'returns list specified by id' do
+ service = described_class.new(parent, user, list_id: list1.id)
+
+ expect(service.execute(board, create_default_lists: false)).to eq [list1]
+ end
+
+ it 'returns empty result when list is not found' do
+ external_board = create(:board, resource_parent: create(:project))
+ external_list = create(:list, board: external_board)
+ service = described_class.new(parent, user, list_id: external_list.id)
+
+ expect(service.execute(board, create_default_lists: false)).to eq(List.none)
+ end
+ end
end
diff --git a/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb b/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb
new file mode 100644
index 00000000000..7fc7ff8a8de
--- /dev/null
+++ b/spec/support/shared_examples/services/jira_import/user_mapper_services_shared_examples.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'mapping jira users' do
+ let(:client) { double }
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:jira_service) { create(:jira_service, project: project, active: true) }
+
+ before do
+ allow(subject).to receive(:client).and_return(client)
+ allow(client).to receive(:get).with(url).and_return(jira_users)
+ end
+
+ subject { described_class.new(jira_service, start_at) }
+
+ context 'when jira_users is nil' do
+ let(:jira_users) { nil }
+
+ it 'returns an empty array' do
+ expect(subject.execute).to be_empty
+ end
+ end
+
+ context 'when jira_users is present' do
+ # TODO: for now we only build an array in the expected format;
+ # the actual user mapping is tracked in https://gitlab.com/gitlab-org/gitlab/-/issues/219023
+ let(:mapped_users) do
+ [
+ { jira_account_id: 'abcd', jira_display_name: 'user1', jira_email: nil, gitlab_id: nil, gitlab_username: nil, gitlab_name: nil },
+ { jira_account_id: 'efg', jira_display_name: nil, jira_email: nil, gitlab_id: nil, gitlab_username: nil, gitlab_name: nil },
+ { jira_account_id: 'hij', jira_display_name: 'user3', jira_email: 'user3@example.com', gitlab_id: nil, gitlab_username: nil, gitlab_name: nil }
+ ]
+ end
+
+ it 'returns users mapped to GitLab' do
+ expect(subject.execute).to eq(mapped_users)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
index c8fabfe30b9..1501a2a0f52 100644
--- a/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
+++ b/spec/support/shared_examples/services/metrics/dashboard_shared_examples.rb
@@ -62,7 +62,7 @@ end
RSpec.shared_examples 'dashboard_version contains SHA256 hash of dashboard file content' do
specify do
dashboard = File.read(Rails.root.join(dashboard_path))
- expect(Digest::SHA256.hexdigest(dashboard)).to eq(dashboard_version)
+ expect(dashboard_version).to eq(Digest::SHA256.hexdigest(dashboard))
end
end
@@ -78,6 +78,12 @@ RSpec.shared_examples 'raises error for users with insufficient permissions' do
it_behaves_like 'misconfigured dashboard service response', :unauthorized
end
+
+ context 'when the user is anonymous' do
+ let(:user) { nil }
+
+ it_behaves_like 'misconfigured dashboard service response', :unauthorized
+ end
end
RSpec.shared_examples 'valid dashboard cloning process' do |dashboard_template, sequence|
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index 2ddbdebdb97..f201c7b1780 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -2,9 +2,11 @@
RSpec.shared_examples 'moves repository to another storage' do |repository_type|
let(:project_repository_double) { double(:repository) }
+ let(:original_project_repository_double) { double(:repository) }
let!(:project_repository_checksum) { project.repository.checksum }
let(:repository_double) { double(:repository) }
+ let(:original_repository_double) { double(:repository) }
let(:repository_checksum) { repository.checksum }
before do
@@ -14,10 +16,16 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
allow(Gitlab::Git::Repository).to receive(:new)
.with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
.and_return(project_repository_double)
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('default', project.repository.raw.relative_path, nil, nil)
+ .and_return(original_project_repository_double)
allow(Gitlab::Git::Repository).to receive(:new)
.with('test_second_storage', repository.raw.relative_path, repository.gl_repository, repository.full_path)
.and_return(repository_double)
+ allow(Gitlab::Git::Repository).to receive(:new)
+ .with('default', repository.raw.relative_path, nil, nil)
+ .and_return(original_repository_double)
end
context 'when the move succeeds', :clean_gitlab_redis_shared_state do
@@ -35,8 +43,8 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
allow(repository_double).to receive(:checksum)
.and_return(repository_checksum)
- expect(GitlabShellWorker).to receive(:perform_async).with(:mv_repository, 'default', anything, anything)
- .twice.and_call_original
+ expect(original_project_repository_double).to receive(:remove)
+ expect(original_repository_double).to receive(:remove)
end
it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read only" do
@@ -110,13 +118,36 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
.with(repository.raw)
.and_raise(Gitlab::Git::CommandError)
- expect(GitlabShellWorker).not_to receive(:perform_async)
-
result = subject.execute
expect(result).to be_error
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
+ expect(repository_storage_move).to be_failed
+ end
+ end
+
+ context "when the cleanup of the #{repository_type} repository fails" do
+ it 'sets the correct state' do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+ allow(project_repository_double).to receive(:replicate)
+ .with(project.repository.raw)
+ allow(project_repository_double).to receive(:checksum)
+ .and_return(project_repository_checksum)
+ allow(original_project_repository_double).to receive(:remove)
+ allow(repository_double).to receive(:replicate)
+ .with(repository.raw)
+ allow(repository_double).to receive(:checksum)
+ .and_return(repository_checksum)
+
+ expect(original_repository_double).to receive(:remove)
+ .and_raise(Gitlab::Git::CommandError)
+
+ result = subject.execute
+
+ expect(result).to be_error
+ expect(repository_storage_move).to be_cleanup_failed
end
end
@@ -134,8 +165,6 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
allow(repository_double).to receive(:checksum)
.and_return('not matching checksum')
- expect(GitlabShellWorker).not_to receive(:perform_async)
-
result = subject.execute
expect(result).to be_error
diff --git a/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb b/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
index ef41c2fcc13..d70ed707822 100644
--- a/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/resource_events/change_milestone_service_shared_examples.rb
@@ -1,49 +1,63 @@
# frozen_string_literal: true
-RSpec.shared_examples 'a milestone events creator' do
+RSpec.shared_examples 'timebox(milestone or iteration) resource events creator' do |timebox_event_class|
let_it_be(:user) { create(:user) }
- let(:created_at_time) { Time.utc(2019, 12, 30) }
- let(:service) { described_class.new(resource, user, created_at: created_at_time, old_milestone: nil) }
-
- context 'when milestone is present' do
- let_it_be(:milestone) { create(:milestone) }
+ context 'when milestone/iteration is added' do
+ let(:service) { described_class.new(resource, user, add_timebox_args) }
before do
- resource.milestone = milestone
+ set_timebox(timebox_event_class, timebox)
end
it 'creates the expected event record' do
- expect { service.execute }.to change { ResourceMilestoneEvent.count }.by(1)
+ expect { service.execute }.to change { timebox_event_class.count }.by(1)
- expect_event_record(ResourceMilestoneEvent.last, action: 'add', milestone: milestone, state: 'opened')
+ expect_event_record(timebox_event_class, timebox_event_class.last, action: 'add', state: 'opened', timebox: timebox)
end
end
- context 'when milestones is not present' do
+ context 'when milestone/iteration is removed' do
+ let(:service) { described_class.new(resource, user, remove_timebox_args) }
+
before do
- resource.milestone = nil
+ set_timebox(timebox_event_class, nil)
end
- let(:old_milestone) { create(:milestone, project: resource.project) }
- let(:service) { described_class.new(resource, user, created_at: created_at_time, old_milestone: old_milestone) }
-
it 'creates the expected event records' do
- expect { service.execute }.to change { ResourceMilestoneEvent.count }.by(1)
+ expect { service.execute }.to change { timebox_event_class.count }.by(1)
- expect_event_record(ResourceMilestoneEvent.last, action: 'remove', milestone: old_milestone, state: 'opened')
+ expect_event_record(timebox_event_class, timebox_event_class.last, action: 'remove', timebox: timebox, state: 'opened')
end
end
- def expect_event_record(event, expected_attrs)
+ def expect_event_record(timebox_event_class, event, expected_attrs)
expect(event.action).to eq(expected_attrs[:action])
- expect(event.state).to eq(expected_attrs[:state])
expect(event.user).to eq(user)
expect(event.issue).to eq(resource) if resource.is_a?(Issue)
expect(event.issue).to be_nil unless resource.is_a?(Issue)
expect(event.merge_request).to eq(resource) if resource.is_a?(MergeRequest)
expect(event.merge_request).to be_nil unless resource.is_a?(MergeRequest)
- expect(event.milestone).to eq(expected_attrs[:milestone])
expect(event.created_at).to eq(created_at_time)
+ expect_timebox(timebox_event_class, event, expected_attrs)
+ end
+
+ def set_timebox(timebox_event_class, timebox)
+ case timebox_event_class.name
+ when 'ResourceMilestoneEvent'
+ resource.milestone = timebox
+ when 'ResourceIterationEvent'
+ resource.iteration = timebox
+ end
+ end
+
+ def expect_timebox(timebox_event_class, event, expected_attrs)
+ case timebox_event_class.name
+ when 'ResourceMilestoneEvent'
+ expect(event.state).to eq(expected_attrs[:state])
+ expect(event.milestone).to eq(expected_attrs[:timebox])
+ when 'ResourceIterationEvent'
+ expect(event.iteration).to eq(expected_attrs[:timebox])
+ end
end
end
diff --git a/spec/support/shared_examples/services/wiki_pages/create_service_shared_examples.rb b/spec/support/shared_examples/services/wiki_pages/create_service_shared_examples.rb
index ebe78c299a5..980a752cf86 100644
--- a/spec/support/shared_examples/services/wiki_pages/create_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/wiki_pages/create_service_shared_examples.rb
@@ -16,8 +16,10 @@ RSpec.shared_examples 'WikiPages::CreateService#execute' do |container_type|
subject(:service) { described_class.new(container: container, current_user: user, params: opts) }
it 'creates wiki page with valid attributes' do
- page = service.execute
+ response = service.execute
+ page = response.payload[:page]
+ expect(response).to be_success
expect(page).to be_valid
expect(page).to be_persisted
expect(page.title).to eq(opts[:title])
@@ -77,7 +79,12 @@ RSpec.shared_examples 'WikiPages::CreateService#execute' do |container_type|
end
it 'reports the error' do
- expect(service.execute).to be_invalid
+ response = service.execute
+ page = response.payload[:page]
+
+ expect(response).to be_error
+
+ expect(page).to be_invalid
.and have_attributes(errors: be_present)
end
end
diff --git a/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb b/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb
index 541e332e3a1..555a6d5eed0 100644
--- a/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/wikis/create_attachment_service_shared_examples.rb
@@ -14,6 +14,7 @@ RSpec.shared_examples 'Wikis::CreateAttachmentService#execute' do |container_typ
file_content: 'Content of attachment'
}
end
+
let(:opts) { file_opts }
let(:service) { Wikis::CreateAttachmentService.new(container: container, current_user: user, params: opts) }
diff --git a/spec/support/shared_examples/snippet_blob_shared_examples.rb b/spec/support/shared_examples/snippet_blob_shared_examples.rb
index ba97688d017..3ed777ee4b8 100644
--- a/spec/support/shared_examples/snippet_blob_shared_examples.rb
+++ b/spec/support/shared_examples/snippet_blob_shared_examples.rb
@@ -22,3 +22,24 @@ RSpec.shared_examples 'snippet blob raw path' do
end
end
end
+
+RSpec.shared_examples 'snippet blob raw url' do
+ let(:blob) { snippet.blobs.first }
+ let(:ref) { blob.repository.root_ref }
+
+ context 'for PersonalSnippets' do
+ let(:snippet) { personal_snippet }
+
+ it 'returns the raw personal snippet blob url' do
+ expect(subject).to eq("http://test.host/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}")
+ end
+ end
+
+ context 'for ProjectSnippets' do
+ let(:snippet) { project_snippet }
+
+ it 'returns the raw project snippet blob url' do
+ expect(subject).to eq("http://test.host/#{snippet.project.full_path}/-/snippets/#{snippet.id}/raw/#{ref}/#{blob.path}")
+ end
+ end
+end
diff --git a/spec/support_specs/helpers/stub_feature_flags_spec.rb b/spec/support_specs/helpers/stub_feature_flags_spec.rb
index 8d5f16751ae..5d1e4e1627d 100644
--- a/spec/support_specs/helpers/stub_feature_flags_spec.rb
+++ b/spec/support_specs/helpers/stub_feature_flags_spec.rb
@@ -119,6 +119,42 @@ RSpec.describe StubFeatureFlags do
end
end
+ describe 'stub timing' do
+ context 'let_it_be variable' do
+ let_it_be(:let_it_be_var) { Feature.enabled?(:any_feature_flag) }
+
+ it { expect(let_it_be_var).to eq true }
+ end
+
+ context 'before_all variable' do
+ before_all do
+ @suite_var = Feature.enabled?(:any_feature_flag)
+ end
+
+ it { expect(@suite_var).to eq true }
+ end
+
+ context 'before(:all) variable' do
+ before(:all) do
+ @suite_var = Feature.enabled?(:any_feature_flag)
+ end
+
+ it { expect(@suite_var).to eq true }
+ end
+
+ context 'with stub_feature_flags meta' do
+ let(:var) { Feature.enabled?(:any_feature_flag) }
+
+ context 'as true', :stub_feature_flags do
+ it { expect(var).to eq true }
+ end
+
+ context 'as false', stub_feature_flags: false do
+ it { expect(var).to eq false }
+ end
+ end
+ end
+
def actor(actor)
case actor
when Array
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 661ce8e74d2..6ac46712aa3 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -283,19 +283,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
context 'multiple repository storages' do
- let(:test_second_storage) do
- Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/custom_storage'))
- end
- let(:storages) do
- {
- 'default' => Gitlab.config.repositories.storages.default,
- 'test_second_storage' => test_second_storage
- }
- end
-
- before(:all) do
- @default_storage_hash = Gitlab.config.repositories.storages.default.to_h
- end
+ let_it_be(:default_storage_hash) { Gitlab.config.repositories.storages.default.to_h }
before do
# We only need a backup of the repositories for this test
@@ -306,17 +294,6 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
# Avoid asking gitaly about the root ref (which will fail because of the
# mocked storages)
allow_any_instance_of(Repository).to receive(:empty?).and_return(false)
- end
-
- after do
- FileUtils.rm_rf(Settings.absolute('tmp/tests/custom_storage'))
- end
-
- it 'includes repositories in all repository storages' do
- project_a = create(:project, :repository)
- project_b = create(:project, :repository, repository_storage: 'test_second_storage')
-
- b_storage_dir = File.join(Settings.absolute('tmp/tests/custom_storage'), File.dirname(project_b.disk_path))
FileUtils.mkdir_p(b_storage_dir)
@@ -327,16 +304,91 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
Rails.root.join(storages['test_second_storage'].legacy_disk_path, project_b.repository.disk_path + '.git')
)
end
+ end
+
+ after do
+ FileUtils.rm_rf(test_second_storage_dir)
+ end
+
+ let(:test_second_storage_dir) { Dir.mktmpdir }
+
+ let(:test_second_storage) do
+ Gitlab::GitalyClient::StorageSettings.new(default_storage_hash.merge('path' => test_second_storage_dir))
+ end
+
+ let(:storages) do
+ {
+ 'default' => Gitlab.config.repositories.storages.default,
+ 'test_second_storage' => test_second_storage
+ }
+ end
+
+ let!(:project_a) { create(:project, :repository) }
+ let!(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }
+ let!(:b_storage_dir) { File.join(test_second_storage_dir, File.dirname(project_b.disk_path)) }
+
+ context 'no concurrency' do
+ it 'includes repositories in all repository storages' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ tar_contents, exit_status = Gitlab::Popen.popen(
+ %W{tar -tvf #{backup_tar} repositories}
+ )
+
+ expect(exit_status).to eq(0)
+ expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
+ expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
+ end
+ end
+
+ context 'with concurrency' do
+ before do
+ stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 4)
+ end
+
+ it 'includes repositories in all repository storages' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ tar_contents, exit_status = Gitlab::Popen.popen(
+ %W{tar -tvf #{backup_tar} repositories}
+ )
+
+ expect(exit_status).to eq(0)
+ expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
+ expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
+ end
+ end
+ end
+
+ context 'concurrency settings' do
+ before do
+ # We only need a backup of the repositories for this test
+ stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry')
+
+ create(:project, :repository)
+ end
+
+ it 'has defaults' do
+ expect_next_instance_of(::Backup::Repository) do |instance|
+ expect(instance).to receive(:dump)
+ .with(max_concurrency: 1, max_storage_concurrency: 1)
+ .and_call_original
+ end
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+ end
- tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{backup_tar} repositories}
- )
+ it 'passes through concurrency environment variables' do
+ stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 5)
+ stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
- expect(exit_status).to eq(0)
- expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
- expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
+ expect_next_instance_of(::Backup::Repository) do |instance|
+ expect(instance).to receive(:dump)
+ .with(max_concurrency: 5, max_storage_concurrency: 2)
+ .and_call_original
+ end
+
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
end
end
end # backup_create task
diff --git a/spec/tasks/gitlab/packages/migrate_rake_spec.rb b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
index 0a296eb0808..618ff215c74 100644
--- a/spec/tasks/gitlab/packages/migrate_rake_spec.rb
+++ b/spec/tasks/gitlab/packages/migrate_rake_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'gitlab:packages namespace rake task' do
context 'object storage disabled' do
before do
- stub_packages_object_storage(enabled: false)
+ stub_package_file_object_storage(enabled: false)
end
it "doesn't migrate files" do
@@ -28,7 +28,7 @@ RSpec.describe 'gitlab:packages namespace rake task' do
context 'object storage enabled' do
before do
- stub_packages_object_storage
+ stub_package_file_object_storage
end
it 'migrates local file to object storage' do
diff --git a/spec/tooling/lib/tooling/helm3_client_spec.rb b/spec/tooling/lib/tooling/helm3_client_spec.rb
index f12bae051f0..41c51ec5754 100644
--- a/spec/tooling/lib/tooling/helm3_client_spec.rb
+++ b/spec/tooling/lib/tooling/helm3_client_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe Tooling::Helm3Client do
]
OUTPUT
end
+
let(:raw_helm_list_page2) do
<<~OUTPUT
[
@@ -22,6 +23,7 @@ RSpec.describe Tooling::Helm3Client do
]
OUTPUT
end
+
let(:raw_helm_list_empty) do
<<~OUTPUT
[]
diff --git a/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb b/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb
new file mode 100644
index 00000000000..0630e9f6546
--- /dev/null
+++ b/spec/uploaders/ci/pipeline_artifact_uploader_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::PipelineArtifactUploader do
+ let(:pipeline_artifact) { create(:ci_pipeline_artifact) }
+ let(:uploader) { described_class.new(pipeline_artifact, :file) }
+
+ subject { uploader }
+
+ it_behaves_like "builds correct paths",
+ store_dir: %r[\h{2}/\h{2}/\h{64}/pipelines/\d+/artifacts/\d+],
+ cache_dir: %r[artifacts/tmp/cache],
+ work_dir: %r[artifacts/tmp/work]
+
+ context 'when object store is REMOTE' do
+ before do
+ stub_artifacts_object_storage(described_class)
+ end
+
+ include_context 'with storage', described_class::Store::REMOTE
+
+ it_behaves_like 'builds correct paths', store_dir: %r[\h{2}/\h{2}/\h{64}/pipelines/\d+/artifacts/\d+]
+ end
+
+ context 'when file is stored in valid local_path' do
+ let(:file) do
+ fixture_file_upload('spec/fixtures/pipeline_artifacts/code_coverage.json', 'application/json')
+ end
+
+ before do
+ uploader.store!(file)
+ end
+
+ subject { uploader.file.path }
+
+ it { is_expected.to match(%r[#{uploader.root}/#{uploader.class.base_dir}\h{2}/\h{2}/\h{64}/pipelines/#{pipeline_artifact.pipeline_id}/artifacts/#{pipeline_artifact.id}/code_coverage.json]) }
+ end
+end
diff --git a/spec/uploaders/file_mover_spec.rb b/spec/uploaders/file_mover_spec.rb
index 0f7496f17d5..3b8c6f6f881 100644
--- a/spec/uploaders/file_mover_spec.rb
+++ b/spec/uploaders/file_mover_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe FileMover do
"test ![banana_sample](/#{temp_file_path}) "\
"same ![banana_sample](/#{temp_file_path}) "
end
+
let(:file_path) { File.join('uploads/-/system/personal_snippet', snippet.id.to_s, secret, filename) }
let(:snippet) { create(:personal_snippet, description: temp_description) }
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 694aafe5ed5..12c936e154b 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -382,6 +382,32 @@ RSpec.describe ObjectStorage do
it { is_expected.to eq(nil) }
end
+ describe '#fog_attributes' do
+ subject { uploader.fog_attributes }
+
+ it { is_expected.to eq({}) }
+
+ context 'with encryption configured' do
+ let(:raw_options) do
+ {
+ "enabled" => true,
+ "connection" => { "provider" => 'AWS' },
+ "storage_options" => { "server_side_encryption" => "AES256" }
+ }
+ end
+
+ let(:options) { Settingslogic.new(raw_options) }
+
+ before do
+ allow(uploader_class).to receive(:options) do
+ double(object_store: options)
+ end
+ end
+
+ it { is_expected.to eq({ "x-amz-server-side-encryption" => "AES256" }) }
+ end
+ end
+
describe '.workhorse_authorize' do
let(:has_length) { true }
let(:maximum_size) { nil }
@@ -459,13 +485,18 @@ RSpec.describe ObjectStorage do
context 'uses AWS' do
let(:storage_url) { "https://uploads.s3-eu-central-1.amazonaws.com/" }
+ let(:credentials) do
+ {
+ provider: "AWS",
+ aws_access_key_id: "AWS_ACCESS_KEY_ID",
+ aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
+ region: "eu-central-1"
+ }
+ end
before do
- expect(uploader_class).to receive(:object_store_credentials) do
- { provider: "AWS",
- aws_access_key_id: "AWS_ACCESS_KEY_ID",
- aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
- region: "eu-central-1" }
+ expect_next_instance_of(ObjectStorage::Config) do |instance|
+ allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -502,12 +533,17 @@ RSpec.describe ObjectStorage do
context 'uses Google' do
let(:storage_url) { "https://storage.googleapis.com/uploads/" }
+ let(:credentials) do
+ {
+ provider: "Google",
+ google_storage_access_key_id: 'ACCESS_KEY_ID',
+ google_storage_secret_access_key: 'SECRET_ACCESS_KEY'
+ }
+ end
before do
- expect(uploader_class).to receive(:object_store_credentials) do
- { provider: "Google",
- google_storage_access_key_id: 'ACCESS_KEY_ID',
- google_storage_secret_access_key: 'SECRET_ACCESS_KEY' }
+ expect_next_instance_of(ObjectStorage::Config) do |instance|
+ allow(instance).to receive(:credentials).and_return(credentials)
end
end
@@ -537,15 +573,18 @@ RSpec.describe ObjectStorage do
context 'uses GDK/minio' do
let(:storage_url) { "http://minio:9000/uploads/" }
+ let(:credentials) do
+ { provider: "AWS",
+ aws_access_key_id: "AWS_ACCESS_KEY_ID",
+ aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
+ endpoint: 'http://minio:9000',
+ path_style: true,
+ region: "gdk" }
+ end
before do
- expect(uploader_class).to receive(:object_store_credentials) do
- { provider: "AWS",
- aws_access_key_id: "AWS_ACCESS_KEY_ID",
- aws_secret_access_key: "AWS_SECRET_ACCESS_KEY",
- endpoint: 'http://minio:9000',
- path_style: true,
- region: "gdk" }
+ expect_next_instance_of(ObjectStorage::Config) do |instance|
+ allow(instance).to receive(:credentials).and_return(credentials)
end
end
diff --git a/spec/uploaders/packages/package_file_uploader_spec.rb b/spec/uploaders/packages/package_file_uploader_spec.rb
index 1fe65649d7a..f4e72892c8f 100644
--- a/spec/uploaders/packages/package_file_uploader_spec.rb
+++ b/spec/uploaders/packages/package_file_uploader_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Packages::PackageFileUploader do
context 'object store is remote' do
before do
- stub_packages_object_storage
+ stub_package_file_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
@@ -29,7 +29,7 @@ RSpec.describe Packages::PackageFileUploader do
context 'with object storage enabled' do
before do
- stub_packages_object_storage
+ stub_package_file_object_storage
end
it 'can store file remotely' do
diff --git a/spec/validators/qualified_domain_array_validator_spec.rb b/spec/validators/qualified_domain_array_validator_spec.rb
index 788e007970a..865ecffe05a 100644
--- a/spec/validators/qualified_domain_array_validator_spec.rb
+++ b/spec/validators/qualified_domain_array_validator_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe QualifiedDomainArrayValidator do
end
end
end
+
let!(:record) do
qualified_domain_array_validator_test_class.new(['gitlab.com'])
end
diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb
index 569a20e8f08..70fb77944cc 100644
--- a/spec/views/admin/dashboard/index.html.haml_spec.rb
+++ b/spec/views/admin/dashboard/index.html.haml_spec.rb
@@ -32,4 +32,13 @@ RSpec.describe 'admin/dashboard/index.html.haml' do
expect(rendered).to have_content "#{Gitlab::VERSION} (#{Gitlab.revision})"
end
+
+ it 'does not include license breakdown' do
+ render
+
+ expect(rendered).not_to have_content "Users in License"
+ expect(rendered).not_to have_content "Active Users"
+ expect(rendered).not_to have_content "Maximum Users"
+ expect(rendered).not_to have_content "Users over License"
+ end
end
diff --git a/spec/views/layouts/_flash.html.haml_spec.rb b/spec/views/layouts/_flash.html.haml_spec.rb
new file mode 100644
index 00000000000..82c06feb4fb
--- /dev/null
+++ b/spec/views/layouts/_flash.html.haml_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/_flash' do
+ before do
+ allow(view).to receive(:flash).and_return(flash)
+ render
+ end
+
+ describe 'closable flash messages' do
+ %w(alert notice success).each do |flash_type|
+ let(:flash) { { flash_type => 'This is a closable flash message' } }
+
+ it 'shows a close button' do
+ expect(rendered).to include('js-close-icon')
+ end
+ end
+ end
+
+ describe 'non closable flash messages' do
+ %w(error message toast warning).each do |flash_type|
+ let(:flash) { { flash_type => 'This is a non closable flash message' } }
+
+      it 'does not show a close button' do
+ expect(rendered).not_to include('js-close-icon')
+ end
+ end
+ end
+end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index bf0bf63e164..bf5b5785b8d 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -47,6 +47,58 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
+ describe 'Packages' do
+ let(:user) { create(:user) }
+
+ let_it_be(:package_menu_name) { 'Packages & Registries' }
+ let_it_be(:package_entry_name) { 'Package Registry' }
+
+ before do
+ project.team.add_developer(user)
+ sign_in(user)
+ stub_container_registry_config(enabled: true)
+ end
+
+ context 'when packages is enabled' do
+ it 'packages link is visible' do
+ render
+
+ expect(rendered).to have_link(package_menu_name, href: project_packages_path(project))
+ end
+
+ it 'packages list link is visible' do
+ render
+
+ expect(rendered).to have_link(package_entry_name, href: project_packages_path(project))
+ end
+
+ it 'container registry link is visible' do
+ render
+
+ expect(rendered).to have_link('Container Registry', href: project_container_registry_index_path(project))
+ end
+ end
+
+ context 'when container registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ end
+
+      it 'packages top level and list links are visible' do
+ render
+
+ expect(rendered).to have_link(package_menu_name, href: project_packages_path(project))
+ expect(rendered).to have_link(package_entry_name, href: project_packages_path(project))
+ end
+
+ it 'container registry link is not visible' do
+ render
+
+ expect(rendered).not_to have_link('Container Registry', href: project_container_registry_index_path(project))
+ end
+ end
+ end
+
describe 'releases entry' do
it 'renders releases link' do
render
diff --git a/spec/views/notify/changed_milestone_email.html.haml_spec.rb b/spec/views/notify/changed_milestone_email.html.haml_spec.rb
index 50a06683409..03904ff0747 100644
--- a/spec/views/notify/changed_milestone_email.html.haml_spec.rb
+++ b/spec/views/notify/changed_milestone_email.html.haml_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'notify/changed_milestone_email.html.haml' do
context 'milestone with start and due dates' do
before do
- milestone.update(start_date: '2018-01-01', due_date: '2018-12-31')
+ milestone.update!(start_date: '2018-01-01', due_date: '2018-12-31')
end
it 'renders with date range' do
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index 5acfbfb9db1..1b8b28367c1 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -12,6 +12,26 @@ RSpec.describe 'profiles/preferences/show' do
allow(controller).to receive(:current_user).and_return(user)
end
+ context 'navigation theme' do
+ before do
+ render
+ end
+
+ it 'has an id for anchoring' do
+ expect(rendered).to have_css('#navigation-theme')
+ end
+ end
+
+ context 'syntax highlighting theme' do
+ before do
+ render
+ end
+
+ it 'has an id for anchoring' do
+ expect(rendered).to have_css('#syntax-highlighting-theme')
+ end
+ end
+
context 'behavior' do
before do
render
@@ -20,6 +40,25 @@ RSpec.describe 'profiles/preferences/show' do
it 'has option for Render whitespace characters in the Web IDE' do
expect(rendered).to have_unchecked_field('Render whitespace characters in the Web IDE')
end
+
+ it 'has an id for anchoring' do
+ expect(rendered).to have_css('#behavior')
+ end
+
+ it 'has helpful homepage setup guidance' do
+ expect(rendered).to have_field('Homepage content')
+ expect(rendered).to have_content('Choose what content you want to see on your homepage.')
+ end
+ end
+
+ context 'localization' do
+ before do
+ render
+ end
+
+ it 'has an id for anchoring' do
+ expect(rendered).to have_css('#localization')
+ end
end
context 'sourcegraph' do
@@ -28,7 +67,7 @@ RSpec.describe 'profiles/preferences/show' do
end
def have_integrations_section
- have_css('.profile-settings-sidebar', { text: 'Integrations' })
+ have_css('#integrations.profile-settings-sidebar', { text: 'Integrations' })
end
before do
diff --git a/spec/views/projects/ci/lints/show.html.haml_spec.rb b/spec/views/projects/ci/lints/show.html.haml_spec.rb
index bcfb952ca66..a71cea6d3c8 100644
--- a/spec/views/projects/ci/lints/show.html.haml_spec.rb
+++ b/spec/views/projects/ci/lints/show.html.haml_spec.rb
@@ -82,6 +82,20 @@ RSpec.describe 'projects/ci/lints/show' do
expect(rendered).to have_content('Environment: testing')
expect(rendered).to have_content('When: on_success')
end
+
+ context 'when content has warnings' do
+ before do
+ assign(:warnings, ['Warning 1', 'Warning 2'])
+ end
+
+ it 'shows warning messages' do
+ render
+
+ expect(rendered).to have_content('Warning:')
+ expect(rendered).to have_content('Warning 1')
+ expect(rendered).to have_content('Warning 2')
+ end
+ end
end
context 'when the content is invalid' do
@@ -89,6 +103,7 @@ RSpec.describe 'projects/ci/lints/show' do
assign(:project, project)
assign(:status, false)
assign(:errors, ['Undefined error'])
+ assign(:warnings, ['Warning 1', 'Warning 2'])
end
it 'shows error message' do
@@ -98,5 +113,13 @@ RSpec.describe 'projects/ci/lints/show' do
expect(rendered).to have_content('Undefined error')
expect(rendered).not_to have_content('Tag list:')
end
+
+ it 'shows warning messages' do
+ render
+
+ expect(rendered).to have_content('Warning:')
+ expect(rendered).to have_content('Warning 1')
+ expect(rendered).to have_content('Warning 2')
+ end
end
end
diff --git a/spec/views/projects/commit/show.html.haml_spec.rb b/spec/views/projects/commit/show.html.haml_spec.rb
index c2970abbe9f..e23ffe300c5 100644
--- a/spec/views/projects/commit/show.html.haml_spec.rb
+++ b/spec/views/projects/commit/show.html.haml_spec.rb
@@ -14,6 +14,12 @@ RSpec.describe 'projects/commit/show.html.haml' do
assign(:notes, [])
assign(:diffs, commit.diffs)
+ controller.params[:controller] = 'projects/commit'
+ controller.params[:action] = 'show'
+ controller.params[:namespace_id] = project.namespace.to_param
+ controller.params[:project_id] = project.to_param
+ controller.params[:id] = commit.id
+
allow(view).to receive(:current_user).and_return(nil)
allow(view).to receive(:can?).and_return(false)
allow(view).to receive(:can_collaborate_with_project?).and_return(false)
diff --git a/spec/views/projects/imports/new.html.haml_spec.rb b/spec/views/projects/imports/new.html.haml_spec.rb
index edf9eadf924..7c171ee65b9 100644
--- a/spec/views/projects/imports/new.html.haml_spec.rb
+++ b/spec/views/projects/imports/new.html.haml_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe "projects/imports/new.html.haml" do
let(:project) { create(:project_empty_repo, :import_failed, import_type: :gitlab_project, import_source: '/var/opt/gitlab/gitlab-rails/shared/tmp/project_exports/uploads/t.tar.gz', import_url: nil) }
before do
- project.import_state.update(last_error: '<a href="http://googl.com">Foo</a>')
+ project.import_state.update!(last_error: '<a href="http://googl.com">Foo</a>')
sign_in(user)
project.add_maintainer(user)
end
diff --git a/spec/views/projects/merge_requests/show.html.haml_spec.rb b/spec/views/projects/merge_requests/show.html.haml_spec.rb
index 32819fc2cb0..1acc07dabb6 100644
--- a/spec/views/projects/merge_requests/show.html.haml_spec.rb
+++ b/spec/views/projects/merge_requests/show.html.haml_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe 'projects/merge_requests/show.html.haml' do
describe 'merge request assignee sidebar' do
context 'when assignee is allowed to merge' do
it 'does not show a warning icon' do
- closed_merge_request.update(assignee_id: user.id)
+ closed_merge_request.update!(assignee_id: user.id)
project.add_maintainer(user)
assign(:issuable_sidebar, serialize_issuable_sidebar(user, project, closed_merge_request))
@@ -42,20 +42,4 @@ RSpec.describe 'projects/merge_requests/show.html.haml' do
expect(rendered).to have_css('a', visible: false, text: 'Close')
end
end
-
- context 'when the merge request is open' do
- it 'closes the merge request if the source project does not exist' do
- closed_merge_request.update(state: 'open')
- forked_project.destroy
- # Reload merge request so MergeRequest#source_project turns to `nil`
- closed_merge_request.reload
- preload_view_requirements
-
- render
-
- expect(closed_merge_request.reload.state).to eq('closed')
- expect(rendered).to have_css('a', visible: false, text: 'Reopen')
- expect(rendered).to have_css('a', visible: false, text: 'Close')
- end
- end
end
diff --git a/spec/views/projects/pipelines/new.html.haml_spec.rb b/spec/views/projects/pipelines/new.html.haml_spec.rb
new file mode 100644
index 00000000000..2deacfa8478
--- /dev/null
+++ b/spec/views/projects/pipelines/new.html.haml_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/pipelines/new' do
+ include Devise::Test::ControllerHelpers
+ let_it_be(:project) { create(:project, :repository) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ before do
+ assign(:project, project)
+ assign(:pipeline, pipeline)
+
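+    # Disable the new_pipeline_form feature flag so the HAML form under test renders its own markup.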
+ stub_feature_flags(new_pipeline_form: false)
+ end
+
+ describe 'warning messages' do
+ let(:warning_messages) do
+ [double(content: 'warning 1'), double(content: 'warning 2')]
+ end
+
+ before do
+ allow(pipeline).to receive(:warning_messages).and_return(warning_messages)
+ end
+
+ it 'displays the warnings' do
+ render
+
+ expect(rendered).to have_css('div.alert-warning')
+ expect(rendered).to have_content('warning 1')
+ expect(rendered).to have_content('warning 2')
+ end
+ end
+end
diff --git a/spec/views/projects/pipelines/show.html.haml_spec.rb b/spec/views/projects/pipelines/show.html.haml_spec.rb
new file mode 100644
index 00000000000..49add434ab5
--- /dev/null
+++ b/spec/views/projects/pipelines/show.html.haml_spec.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'projects/pipelines/show' do
+ include Devise::Test::ControllerHelpers
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:presented_pipeline) { pipeline.present(current_user: user) }
+
+ before do
+ assign(:project, project)
+ assign(:pipeline, presented_pipeline)
+
+ stub_feature_flags(new_pipeline_form: false)
+ end
+
+ shared_examples 'pipeline with warning messages' do
+ let(:warning_messages) do
+ [double(content: 'warning 1'), double(content: 'warning 2')]
+ end
+
+ before do
+ allow(pipeline).to receive(:warning_messages).and_return(warning_messages)
+ end
+
+ it 'displays the warnings' do
+ render
+
+ expect(rendered).to have_css('.bs-callout-warning')
+ expect(rendered).to have_content('warning 1')
+ expect(rendered).to have_content('warning 2')
+ end
+ end
+
+ context 'when pipeline has errors' do
+ before do
+ allow(pipeline).to receive(:yaml_errors).and_return('some errors')
+ end
+
+ it 'shows errors' do
+ render
+
+ expect(rendered).to have_content('Found errors in your .gitlab-ci.yml')
+ expect(rendered).to have_content('some errors')
+ end
+
+ it 'does not render the pipeline tabs' do
+ render
+
+ expect(rendered).not_to have_css('ul.pipelines-tabs')
+ end
+
+    context 'when pipeline also has warnings' do
+ it_behaves_like 'pipeline with warning messages'
+ end
+ end
+
+ context 'when pipeline is valid' do
+ it 'does not show errors' do
+ render
+
+ expect(rendered).not_to have_content('Found errors in your .gitlab-ci.yml')
+ end
+
+ it 'renders the pipeline tabs' do
+ render
+
+ expect(rendered).to have_css('ul.pipelines-tabs')
+ end
+
+ context 'when pipeline has warnings' do
+ it_behaves_like 'pipeline with warning messages'
+ end
+ end
+end
diff --git a/spec/views/projects/services/_form.haml_spec.rb b/spec/views/projects/services/_form.haml_spec.rb
index f5c4e79a082..4ca64eb3129 100644
--- a/spec/views/projects/services/_form.haml_spec.rb
+++ b/spec/views/projects/services/_form.haml_spec.rb
@@ -7,8 +7,6 @@ RSpec.describe 'projects/services/_form' do
let(:user) { create(:admin) }
before do
- stub_feature_flags(integration_form_refactor: false)
-
assign(:project, project)
allow(controller).to receive(:current_user).and_return(user)
@@ -29,8 +27,6 @@ RSpec.describe 'projects/services/_form' do
render
- expect(rendered).to have_content('Event will be triggered when a commit is created/updated')
- expect(rendered).to have_content('Event will be triggered when a merge request is created/updated/merged')
expect(rendered).to have_css("input[name='redirect_to'][value='/services']", count: 1, visible: false)
end
end
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index 1f0b0ea76bf..b4d20da0a5c 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'projects/settings/operations/show' do
it 'renders the Operations Settings page' do
render template: 'projects/settings/operations/show', locals: operations_show_locals
- expect(rendered).to have_content _('Error Tracking')
+ expect(rendered).to have_content _('Error tracking')
expect(rendered).to have_content _('To link Sentry to GitLab, enter your Sentry URL and Auth Token')
end
end
diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb
index 9ddfe08c8f3..eb763d424d3 100644
--- a/spec/views/search/show.html.haml_spec.rb
+++ b/spec/views/search/show.html.haml_spec.rb
@@ -33,5 +33,37 @@ RSpec.describe 'search/show' do
expect(rendered).to render_template('search/_category')
expect(rendered).to render_template('search/_results')
end
+
+ context 'unfurling support' do
+ let(:group) { build(:group) }
+ let(:search_results) do
+ instance_double(Gitlab::GroupSearchResults).tap do |double|
+ allow(double).to receive(:formatted_count).and_return(0)
+ end
+ end
+
+ before do
+ assign(:search_results, search_results)
+ assign(:scope, 'issues')
+ assign(:group, group)
+ end
+
+ it 'renders meta tags for a group' do
+ render
+
+ expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path)
+ end
+
+ it 'renders meta tags for both group and project' do
+ project = build(:project, group: group)
+ assign(:project, project)
+
+ render
+
+ expect(view.page_description).to match(/\d+ issues for term '#{search_term}'/)
+ expect(view.page_card_attributes).to eq("Namespace" => group.full_path, "Project" => project.full_path)
+ end
+ end
end
end
diff --git a/spec/views/shared/_label_row.html.haml_spec.rb b/spec/views/shared/_label_row.html.haml_spec.rb
index 1e2ed41bafc..8f8aa3072e2 100644
--- a/spec/views/shared/_label_row.html.haml_spec.rb
+++ b/spec/views/shared/_label_row.html.haml_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'shared/_label_row.html.haml' do
label_types.each do |label_type, label_factory|
let!(:label) do
- label_record = create(label_factory)
+ label_record = create(label_factory) # rubocop: disable Rails/SaveBang
label_record.present(issuable_subject: label_record.subject)
end
diff --git a/spec/workers/deployments/finished_worker_spec.rb b/spec/workers/deployments/finished_worker_spec.rb
index 9b4bd78c03a..e1ec2d89e0a 100644
--- a/spec/workers/deployments/finished_worker_spec.rb
+++ b/spec/workers/deployments/finished_worker_spec.rb
@@ -49,5 +49,29 @@ RSpec.describe Deployments::FinishedWorker do
expect(ProjectServiceWorker).not_to have_received(:perform_async)
end
+
+    it 'executes webhooks' do
+ deployment = create(:deployment)
+ project = deployment.project
+ web_hook = create(:project_hook, deployment_events: true, project: project)
+
+ expect_next_instance_of(WebHookService, web_hook, an_instance_of(Hash), "deployment_hooks") do |service|
+ expect(service).to receive(:async_execute)
+ end
+
+ worker.perform(deployment.id)
+ end
+
+ it 'does not execute webhooks if feature flag is disabled' do
+ stub_feature_flags(deployment_webhooks: false)
+
+ deployment = create(:deployment)
+ project = deployment.project
+ create(:project_hook, deployment_events: true, project: project)
+
+ expect(WebHookService).not_to receive(:new)
+
+ worker.perform(deployment.id)
+ end
end
end
diff --git a/spec/workers/flush_counter_increments_worker_spec.rb b/spec/workers/flush_counter_increments_worker_spec.rb
new file mode 100644
index 00000000000..14b49b97ac3
--- /dev/null
+++ b/spec/workers/flush_counter_increments_worker_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FlushCounterIncrementsWorker, :counter_attribute do
+ let(:project_statistics) { create(:project_statistics) }
+ let(:model) { CounterAttributeModel.find(project_statistics.id) }
+
+ describe '#perform', :redis do
+ let(:attribute) { model.class.counter_attributes.first }
+ let(:worker) { described_class.new }
+
+ subject { worker.perform(model.class.name, model.id, attribute) }
+
+ it 'flushes increments to database' do
+ expect(model.class).to receive(:find_by_id).and_return(model)
+ expect(model)
+ .to receive(:flush_increments_to_database!)
+ .with(attribute)
+ .and_call_original
+
+ subject
+ end
+
+ context 'when model class does not exist' do
+      subject { worker.perform('non-existent-model') }
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:in_lock)
+ end
+ end
+
+ context 'when record does not exist' do
+ subject { worker.perform(model.class.name, model.id + 100, attribute) }
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:in_lock)
+ end
+ end
+ end
+end
diff --git a/spec/workers/git_garbage_collect_worker_spec.rb b/spec/workers/git_garbage_collect_worker_spec.rb
index cb6396e2859..223f5aea813 100644
--- a/spec/workers/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/git_garbage_collect_worker_spec.rb
@@ -7,35 +7,61 @@ require 'spec_helper'
RSpec.describe GitGarbageCollectWorker do
include GitHelpers
- let(:project) { create(:project, :repository) }
+ let_it_be(:project) { create(:project, :repository) }
let(:shell) { Gitlab::Shell.new }
let!(:lease_uuid) { SecureRandom.uuid }
let!(:lease_key) { "project_housekeeping:#{project.id}" }
+ let(:params) { [project.id, task, lease_key, lease_uuid] }
subject { described_class.new }
+ shared_examples 'it calls Gitaly' do
+ specify do
+ expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(gitaly_task)
+ .and_return(nil)
+
+ subject.perform(*params)
+ end
+ end
+
+ shared_examples 'it updates the project statistics' do
+ specify do
+ expect_any_instance_of(Projects::UpdateStatisticsService).to receive(:execute).and_call_original
+ expect(Projects::UpdateStatisticsService)
+ .to receive(:new)
+ .with(project, nil, statistics: [:repository_size, :lfs_objects_size])
+ .and_call_original
+
+ subject.perform(*params)
+ end
+ end
+
describe "#perform" do
+ let(:gitaly_task) { :garbage_collect }
+ let(:task) { :gc }
+
context 'with active lease_uuid' do
before do
allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
end
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the project statistics'
+
it "flushes ref caches when the task if 'gc'" do
expect(subject).to receive(:renew_lease).with(lease_key, lease_uuid).and_call_original
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:garbage_collect)
- .and_return(nil)
expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
- subject.perform(project.id, :gc, lease_key, lease_uuid)
+ subject.perform(*params)
end
it 'handles gRPC errors' do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:garbage_collect).and_raise(GRPC::NotFound)
- expect { subject.perform(project.id, :gc, lease_key, lease_uuid) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
+ expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
end
end
@@ -49,11 +75,13 @@ RSpec.describe GitGarbageCollectWorker do
expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
- subject.perform(project.id, :gc, lease_key, lease_uuid)
+ subject.perform(*params)
end
end
context 'with no active lease' do
+ let(:params) { [project.id] }
+
before do
allow(subject).to receive(:get_lease_uuid).and_return(false)
end
@@ -63,15 +91,17 @@ RSpec.describe GitGarbageCollectWorker do
allow(subject).to receive(:try_obtain_lease).and_return(SecureRandom.uuid)
end
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the project statistics'
+
it "flushes ref caches when the task if 'gc'" do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:garbage_collect)
- .and_return(nil)
+ expect(subject).to receive(:get_lease_uuid).with("git_gc:#{task}:#{project.id}").and_return(false)
expect_any_instance_of(Repository).to receive(:after_create_branch).and_call_original
expect_any_instance_of(Repository).to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).to receive(:has_visible_content?).and_call_original
expect_any_instance_of(Gitlab::Git::Repository).to receive(:has_visible_content?).and_call_original
- subject.perform(project.id)
+ subject.perform(*params)
end
context 'when the repository has joined a pool' do
@@ -81,7 +111,57 @@ RSpec.describe GitGarbageCollectWorker do
it 'ensures the repositories are linked' do
expect_any_instance_of(PoolRepository).to receive(:link_repository).once
- subject.perform(project.id)
+ subject.perform(*params)
+ end
+ end
+
+ context 'LFS object garbage collection' do
+ before do
+ stub_lfs_setting(enabled: true)
+ end
+
+ let_it_be(:lfs_reference) { create(:lfs_objects_project, project: project) }
+ let(:lfs_object) { lfs_reference.lfs_object }
+
+ context 'with cleanup_lfs_during_gc feature flag enabled' do
+ before do
+ stub_feature_flags(cleanup_lfs_during_gc: true)
+ end
+
+ it 'cleans up unreferenced LFS objects' do
+ expect_next_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences) do |svc|
+ expect(svc.project).to eq(project)
+ expect(svc.dry_run).to be_falsy
+ expect(svc).to receive(:run!).and_call_original
+ end
+
+ subject.perform(*params)
+
+ expect(project.lfs_objects.reload).not_to include(lfs_object)
+ end
+
+ it 'does nothing if the database is read-only' do
+ expect(Gitlab::Database).to receive(:read_only?) { true }
+ expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
+
+ subject.perform(*params)
+
+ expect(project.lfs_objects.reload).to include(lfs_object)
+ end
+ end
+
+ context 'with cleanup_lfs_during_gc feature flag disabled' do
+ before do
+ stub_feature_flags(cleanup_lfs_during_gc: false)
+ end
+
+ it 'does not clean up unreferenced LFS objects' do
+ expect_any_instance_of(Gitlab::Cleanup::OrphanLfsFileReferences).not_to receive(:run!)
+
+ subject.perform(*params)
+
+ expect(project.lfs_objects.reload).to include(lfs_object)
+ end
end
end
end
@@ -97,48 +177,55 @@ RSpec.describe GitGarbageCollectWorker do
expect_any_instance_of(Repository).not_to receive(:branch_names).and_call_original
expect_any_instance_of(Repository).not_to receive(:has_visible_content?).and_call_original
- subject.perform(project.id)
+ subject.perform(*params)
end
end
end
context "repack_full" do
+ let(:task) { :full_repack }
+ let(:gitaly_task) { :repack_full }
+
before do
expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
end
- it "calls Gitaly" do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:repack_full)
- .and_return(nil)
-
- subject.perform(project.id, :full_repack, lease_key, lease_uuid)
- end
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the project statistics'
end
context "pack_refs" do
+ let(:task) { :pack_refs }
+ let(:gitaly_task) { :pack_refs }
+
before do
expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
end
it "calls Gitaly" do
- expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(:pack_refs)
+ expect_any_instance_of(Gitlab::GitalyClient::RefService).to receive(task)
.and_return(nil)
- subject.perform(project.id, :pack_refs, lease_key, lease_uuid)
+ subject.perform(*params)
+ end
+
+ it 'does not update the project statistics' do
+ expect(Projects::UpdateStatisticsService).not_to receive(:new)
+
+ subject.perform(*params)
end
end
context "repack_incremental" do
+ let(:task) { :incremental_repack }
+ let(:gitaly_task) { :repack_incremental }
+
before do
expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
end
- it "calls Gitaly" do
- expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:repack_incremental)
- .and_return(nil)
-
- subject.perform(project.id, :incremental_repack, lease_key, lease_uuid)
- end
+ it_behaves_like 'it calls Gitaly'
+ it_behaves_like 'it updates the project statistics'
end
shared_examples 'gc tasks' do
diff --git a/spec/workers/gitlab/import/stuck_project_import_jobs_worker_spec.rb b/spec/workers/gitlab/import/stuck_project_import_jobs_worker_spec.rb
index 510c41cba21..d12d5a605a7 100644
--- a/spec/workers/gitlab/import/stuck_project_import_jobs_worker_spec.rb
+++ b/spec/workers/gitlab/import/stuck_project_import_jobs_worker_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Import::StuckProjectImportJobsWorker do
let(:import_state) { create(:project, :import_scheduled).import_state }
before do
- import_state.update(jid: '123')
+ import_state.update!(jid: '123')
end
end
end
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::Import::StuckProjectImportJobsWorker do
let(:import_state) { create(:project, :import_started).import_state }
before do
- import_state.update(jid: '123')
+ import_state.update!(jid: '123')
end
end
end
diff --git a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
index 4a4ef5700fa..324e8010887 100644
--- a/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/import_issue_worker_spec.rb
@@ -24,7 +24,8 @@ RSpec.describe Gitlab::JiraImport::ImportIssueWorker do
build(:issue, project_id: project.id, title: 'jira issue')
.as_json.merge(
'label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id], 'assignee_ids' => assignee_ids
- ).compact
+ ).except('issue_type')
+ .compact
end
context 'when any exception raised while inserting to DB' do
diff --git a/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb b/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
index 8271af4db2f..7f1cb8a2076 100644
--- a/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
+++ b/spec/workers/gitlab/jira_import/stuck_jira_import_jobs_worker_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe ::Gitlab::JiraImport::StuckJiraImportJobsWorker do
let(:import_state) { create(:jira_import_state, :scheduled, project: project) }
before do
- import_state.update(jid: '123')
+ import_state.update!(jid: '123')
end
end
end
@@ -22,7 +22,7 @@ RSpec.describe ::Gitlab::JiraImport::StuckJiraImportJobsWorker do
let(:import_state) { create(:jira_import_state, :started, project: project) }
before do
- import_state.update(jid: '123')
+ import_state.update!(jid: '123')
end
end
end
diff --git a/spec/workers/gitlab_usage_ping_worker_spec.rb b/spec/workers/gitlab_usage_ping_worker_spec.rb
index 05d6f2e585b..a180d29fd5f 100644
--- a/spec/workers/gitlab_usage_ping_worker_spec.rb
+++ b/spec/workers/gitlab_usage_ping_worker_spec.rb
@@ -2,16 +2,42 @@
require 'spec_helper'
-RSpec.describe GitlabUsagePingWorker do
- subject { described_class.new }
+RSpec.describe GitlabUsagePingWorker, :clean_gitlab_redis_shared_state do
+ before do
+ allow_next_instance_of(SubmitUsagePingService) { |service| allow(service).to receive(:execute) }
+ allow(subject).to receive(:sleep)
+ end
it 'delegates to SubmitUsagePingService' do
- allow(subject).to receive(:try_obtain_lease).and_return(true)
+ expect_next_instance_of(SubmitUsagePingService) { |service| expect(service).to receive(:execute) }
- expect_next_instance_of(SubmitUsagePingService) do |instance|
- expect(instance).to receive(:execute)
- end
+ subject.perform
+ end
+
+ it "obtains a #{described_class::LEASE_TIMEOUT} second exclusive lease" do
+ expect(Gitlab::ExclusiveLeaseHelpers::SleepingLock)
+ .to receive(:new)
+ .with(described_class::LEASE_KEY, hash_including(timeout: described_class::LEASE_TIMEOUT))
+ .and_call_original
subject.perform
end
+
+ it 'sleeps for between 0 and 60 seconds' do
+ expect(subject).to receive(:sleep).with(0..60)
+
+ subject.perform
+ end
+
+ context 'when lease is not obtained' do
+ before do
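+      # Take the exclusive lease up front so the worker cannot obtain it.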
+ Gitlab::ExclusiveLease.new(described_class::LEASE_KEY, timeout: described_class::LEASE_TIMEOUT).try_obtain
+ end
+
+ it 'does not invoke SubmitUsagePingService' do
+ allow_next_instance_of(SubmitUsagePingService) { |service| expect(service).not_to receive(:execute) }
+
+ expect { subject.perform }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
end
diff --git a/spec/workers/incident_management/process_alert_worker_spec.rb b/spec/workers/incident_management/process_alert_worker_spec.rb
index bed6dc59ac7..20ab283b49b 100644
--- a/spec/workers/incident_management/process_alert_worker_spec.rb
+++ b/spec/workers/incident_management/process_alert_worker_spec.rb
@@ -18,15 +18,15 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
before do
allow(Gitlab::AppLogger).to receive(:warn).and_call_original
- allow(IncidentManagement::CreateIssueService)
- .to receive(:new).with(alert.project, parsed_payload)
+ allow(AlertManagement::CreateAlertIssueService)
+ .to receive(:new).with(alert, User.alert_bot)
.and_call_original
end
shared_examples 'creates issue successfully' do
it 'creates an issue' do
- expect(IncidentManagement::CreateIssueService)
- .to receive(:new).with(alert.project, parsed_payload)
+ expect(AlertManagement::CreateAlertIssueService)
+ .to receive(:new).with(alert, User.alert_bot)
expect { subject }.to change { Issue.count }.by(1)
end
@@ -58,10 +58,10 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
subject
expect(Gitlab::AppLogger).to have_received(:warn).with(
- message: 'Cannot link an Issue with Alert',
+ message: 'Cannot process an Incident',
issue_id: created_issue.id,
alert_id: alert.id,
- alert_errors: { hosts: ['hosts array is over 255 chars'] }
+ errors: 'Hosts hosts array is over 255 chars'
)
end
end
@@ -80,7 +80,7 @@ RSpec.describe IncidentManagement::ProcessAlertWorker do
subject { described_class.new.perform(nil, nil, invalid_alert_id) }
it 'does not create issues' do
- expect(IncidentManagement::CreateIssueService).not_to receive(:new)
+ expect(AlertManagement::CreateAlertIssueService).not_to receive(:new)
expect { subject }.not_to change { Issue.count }
end
diff --git a/spec/workers/migrate_external_diffs_worker_spec.rb b/spec/workers/migrate_external_diffs_worker_spec.rb
index 86d4680acbe..36669b4e694 100644
--- a/spec/workers/migrate_external_diffs_worker_spec.rb
+++ b/spec/workers/migrate_external_diffs_worker_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe MigrateExternalDiffsWorker do
end
it 'does nothing if the diff is missing' do
- diff.destroy
+ diff.destroy!
worker.perform(diff.id)
end
diff --git a/spec/workers/namespaceless_project_destroy_worker_spec.rb b/spec/workers/namespaceless_project_destroy_worker_spec.rb
index ef396bc7fbb..618cd9cabe9 100644
--- a/spec/workers/namespaceless_project_destroy_worker_spec.rb
+++ b/spec/workers/namespaceless_project_destroy_worker_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe NamespacelessProjectDestroyWorker do
let!(:parent_project) { create(:project) }
let(:project) do
namespaceless_project = fork_project(parent_project)
- namespaceless_project.save
+ namespaceless_project.save!
namespaceless_project
end
diff --git a/spec/workers/namespaces/root_statistics_worker_spec.rb b/spec/workers/namespaces/root_statistics_worker_spec.rb
index 0c6e3e89973..a97a850bbcf 100644
--- a/spec/workers/namespaces/root_statistics_worker_spec.rb
+++ b/spec/workers/namespaces/root_statistics_worker_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe Namespaces::RootStatisticsWorker, '#perform' do
context 'with no namespace' do
before do
- group.destroy
+ group.destroy!
end
it 'does not execute the refresher service' do
@@ -64,7 +64,7 @@ RSpec.describe Namespaces::RootStatisticsWorker, '#perform' do
context 'with a namespace with no aggregation scheduled' do
before do
- group.aggregation_schedule.destroy
+ group.aggregation_schedule.destroy!
end
it 'does not execute the refresher service' do
diff --git a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
index 7c745e51df5..dac8c529984 100644
--- a/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
+++ b/spec/workers/pages_domain_ssl_renewal_cron_worker_spec.rb
@@ -18,9 +18,11 @@ RSpec.describe PagesDomainSslRenewalCronWorker do
let!(:domain_with_obtained_letsencrypt) do
create(:pages_domain, :letsencrypt, project: project, auto_ssl_enabled: true)
end
+
let!(:domain_without_auto_certificate) do
create(:pages_domain, :without_certificate, :without_key, project: project, auto_ssl_enabled: true)
end
+
let!(:domain_with_failed_auto_ssl) do
create(:pages_domain, :without_certificate, :without_key, project: project,
auto_ssl_enabled: true, auto_ssl_failed: true)
diff --git a/spec/workers/pages_domain_verification_worker_spec.rb b/spec/workers/pages_domain_verification_worker_spec.rb
index 74b9730f7c1..6d2f9ee2f8d 100644
--- a/spec/workers/pages_domain_verification_worker_spec.rb
+++ b/spec/workers/pages_domain_verification_worker_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe PagesDomainVerificationWorker do
end
it 'does nothing for a non-existent domain' do
- domain.destroy
+ domain.destroy!
expect(VerifyPagesDomainService).not_to receive(:new)
diff --git a/spec/workers/pages_update_configuration_worker_spec.rb b/spec/workers/pages_update_configuration_worker_spec.rb
new file mode 100644
index 00000000000..890b39b22a5
--- /dev/null
+++ b/spec/workers/pages_update_configuration_worker_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe PagesUpdateConfigurationWorker do
+ describe "#perform" do
+ let_it_be(:project) { create(:project) }
+
+ it "does not break if the project doesn't exist" do
+ expect { subject.perform(-1) }.not_to raise_error
+ end
+
+ it "calls the correct service" do
+ expect_next_instance_of(Projects::UpdatePagesConfigurationService, project) do |service|
+ expect(service).to receive(:execute).and_return({})
+ end
+
+ subject.perform(project.id)
+ end
+
+ it "raises an exception if the service returned an error" do
+ allow_next_instance_of(Projects::UpdatePagesConfigurationService) do |service|
+ allow(service).to receive(:execute).and_return({ exception: ":boom:" })
+ end
+
+ expect { subject.perform(project.id) }.to raise_error(":boom:")
+ end
+
+ it_behaves_like "an idempotent worker" do
+ let(:job_args) { [project.id] }
+ let(:pages_dir) { Dir.mktmpdir }
+ let(:config_path) { File.join(pages_dir, "config.json") }
+
+ before do
+ allow(Project).to receive(:find_by_id).with(project.id).and_return(project)
+ allow(project).to receive(:pages_path).and_return(pages_dir)
+
+ # Make sure _some_ config exists
+ FileUtils.touch(config_path)
+ end
+
+ after do
+ FileUtils.remove_entry(pages_dir)
+ end
+
+ it "only updates the config file once" do
+ described_class.new.perform(project.id)
+
+ expect(File.mtime(config_path)).not_to be_nil
+ expect { subject }.not_to change { File.mtime(config_path) }
+ end
+ end
+ end
+end
diff --git a/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb b/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb
new file mode 100644
index 00000000000..676a419553f
--- /dev/null
+++ b/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PersonalAccessTokens::ExpiredNotificationWorker, type: :worker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'when a token has expired' do
+ let(:expired_today) { create(:personal_access_token, expires_at: Date.current) }
+
+ context 'when feature is enabled' do
+ it 'uses notification service to send email to the user' do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).to receive(:access_token_expired).with(expired_today.user)
+ end
+
+ worker.perform
+ end
+
+ it 'updates notified column' do
+ expect { worker.perform }.to change { expired_today.reload.after_expiry_notification_delivered }.from(false).to(true)
+ end
+ end
+
+ context 'when feature is disabled' do
+ before do
+ stub_feature_flags(expired_pat_email_notification: false)
+ end
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_today.reload.after_expiry_notification_delivered }
+ end
+
+ it 'does not trigger email' do
+ expect { worker.perform }.not_to change { ActionMailer::Base.deliveries.count }
+ end
+ end
+ end
+
+ shared_examples 'expiry notification is not required to be sent for the token' do
+ it do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).not_to receive(:access_token_expired).with(token.user)
+ end
+
+ worker.perform
+ end
+ end
+
+ context 'when token has expired in the past' do
+ let(:token) { create(:personal_access_token, expires_at: Date.yesterday) }
+
+ it_behaves_like 'expiry notification is not required to be sent for the token'
+ end
+
+ context 'when token is impersonated' do
+ let(:token) { create(:personal_access_token, expires_at: Date.current, impersonation: true) }
+
+ it_behaves_like 'expiry notification is not required to be sent for the token'
+ end
+
+ context 'when token is revoked' do
+ let(:token) { create(:personal_access_token, expires_at: Date.current, revoked: true) }
+
+ it_behaves_like 'expiry notification is not required to be sent for the token'
+ end
+ end
+end
diff --git a/spec/workers/pipeline_process_worker_spec.rb b/spec/workers/pipeline_process_worker_spec.rb
index a6e6b505a38..5d45a131095 100644
--- a/spec/workers/pipeline_process_worker_spec.rb
+++ b/spec/workers/pipeline_process_worker_spec.rb
@@ -12,17 +12,6 @@ RSpec.describe PipelineProcessWorker do
described_class.new.perform(pipeline.id)
end
-
- context 'when build_ids are passed' do
- let(:build) { create(:ci_build, pipeline: pipeline, name: 'my-build') }
-
- it 'processes pipeline with a list of builds' do
- expect_any_instance_of(Ci::ProcessPipelineService).to receive(:execute)
- .with([build.id])
-
- described_class.new.perform(pipeline.id, [build.id])
- end
- end
end
context 'when pipeline does not exist' do
diff --git a/spec/workers/pipeline_update_worker_spec.rb b/spec/workers/pipeline_update_worker_spec.rb
deleted file mode 100644
index c5c1cc0eefd..00000000000
--- a/spec/workers/pipeline_update_worker_spec.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe PipelineUpdateWorker do
- describe '#perform' do
- context 'when pipeline exists' do
- let(:pipeline) { create(:ci_pipeline) }
-
- it 'updates pipeline status' do
- expect_any_instance_of(Ci::Pipeline).to receive(:set_status).with('skipped')
-
- described_class.new.perform(pipeline.id)
- end
-
- include_examples 'an idempotent worker' do
- let(:job_args) { [pipeline.id] }
-
- it 'sets pipeline status to skipped' do
- expect { subject }.to change { pipeline.reload.status }.from('pending').to('skipped')
- end
- end
- end
-
- context 'when pipeline does not exist' do
- it 'does not raise exception' do
- expect { described_class.new.perform(123) }
- .not_to raise_error
- end
- end
- end
-end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index a33ee6e1da5..7a168bf054e 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -160,7 +160,7 @@ RSpec.describe ProcessCommitWorker do
context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
before do
- issue.metrics.update(first_mentioned_in_commit_at: commit.committed_date - 1.day)
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
end
it "doesn't update issue metrics" do
@@ -170,7 +170,7 @@ RSpec.describe ProcessCommitWorker do
context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
before do
- issue.metrics.update(first_mentioned_in_commit_at: commit.committed_date + 1.day)
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
end
it "doesn't update issue metrics" do
diff --git a/spec/workers/propagate_integration_worker_spec.rb b/spec/workers/propagate_integration_worker_spec.rb
index a0fdd37b3c0..3fe76f14750 100644
--- a/spec/workers/propagate_integration_worker_spec.rb
+++ b/spec/workers/propagate_integration_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe PropagateIntegrationWorker do
describe '#perform' do
let(:integration) do
- PushoverService.create(
+ PushoverService.create!(
template: true,
active: true,
device: 'MyDevice',
diff --git a/spec/workers/propagate_service_template_worker_spec.rb b/spec/workers/propagate_service_template_worker_spec.rb
index 4cba313a23f..48151b25d4b 100644
--- a/spec/workers/propagate_service_template_worker_spec.rb
+++ b/spec/workers/propagate_service_template_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe PropagateServiceTemplateWorker do
describe '#perform' do
it 'calls the propagate service with the template' do
- template = PushoverService.create(
+ template = PushoverService.create!(
template: true,
active: true,
properties: {
diff --git a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
index e716d4806d3..f14c2b67f2c 100644
--- a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
+++ b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
@@ -18,12 +18,14 @@ RSpec.describe RemoveUnreferencedLfsObjectsWorker do
lfs_object: referenced_lfs_object1
)
end
+
let!(:lfs_objects_project2_1) do
create(:lfs_objects_project,
project: project2,
lfs_object: referenced_lfs_object1
)
end
+
let!(:lfs_objects_project1_2) do
create(:lfs_objects_project,
project: project1,
@@ -46,7 +48,7 @@ RSpec.describe RemoveUnreferencedLfsObjectsWorker do
end
it 'removes unreferenced lfs objects after project removal' do
- project1.destroy
+ project1.destroy!
worker.perform
diff --git a/spec/workers/repository_check/single_repository_worker_spec.rb b/spec/workers/repository_check/single_repository_worker_spec.rb
index 28e3f43d374..205d7c08f54 100644
--- a/spec/workers/repository_check/single_repository_worker_spec.rb
+++ b/spec/workers/repository_check/single_repository_worker_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe RepositoryCheck::SingleRepositoryWorker do
end
def create_push_event(project)
- project.events.create(action: :pushed, author_id: create(:user).id)
+ project.events.create!(action: :pushed, author_id: create(:user).id)
end
def break_wiki(project)
diff --git a/spec/workers/repository_cleanup_worker_spec.rb b/spec/workers/repository_cleanup_worker_spec.rb
index 41bfeabb7f3..f5887d08bd2 100644
--- a/spec/workers/repository_cleanup_worker_spec.rb
+++ b/spec/workers/repository_cleanup_worker_spec.rb
@@ -25,13 +25,13 @@ RSpec.describe RepositoryCleanupWorker do
end
it 'raises an error if the project cannot be found' do
- project.destroy
+ project.destroy!
expect { worker.perform(project.id, user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'raises an error if the user cannot be found' do
- user.destroy
+ user.destroy!
expect { worker.perform(project.id, user.id) }.to raise_error(ActiveRecord::RecordNotFound)
end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index a2c19debdfd..4a80f4f9da6 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe RepositoryImportWorker do
it 'hide the credentials that were used in the import URL' do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
- import_state.update(jid: '123')
+ import_state.update!(jid: '123')
expect_next_instance_of(Projects::ImportService) do |instance|
expect(instance).to receive(:execute).and_return({ status: :error, message: error })
end
@@ -63,8 +63,8 @@ RSpec.describe RepositoryImportWorker do
it 'updates the error on Import/Export' do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
- project.update(import_type: 'gitlab_project')
- import_state.update(jid: '123')
+ project.update!(import_type: 'gitlab_project')
+ import_state.update!(jid: '123')
expect_next_instance_of(Projects::ImportService) do |instance|
expect(instance).to receive(:execute).and_return({ status: :error, message: error })
end
diff --git a/spec/workers/repository_update_remote_mirror_worker_spec.rb b/spec/workers/repository_update_remote_mirror_worker_spec.rb
index 37eed57cf2e..c6e667097ec 100644
--- a/spec/workers/repository_update_remote_mirror_worker_spec.rb
+++ b/spec/workers/repository_update_remote_mirror_worker_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe RepositoryUpdateRemoteMirrorWorker, :clean_gitlab_redis_shared_st
end
it 'does not do anything if the mirror was already updated' do
- remote_mirror.update(last_update_started_at: Time.current, update_status: :finished)
+ remote_mirror.update!(last_update_started_at: Time.current, update_status: :finished)
expect(Projects::UpdateRemoteMirrorService).not_to receive(:new)
diff --git a/spec/workers/stuck_ci_jobs_worker_spec.rb b/spec/workers/stuck_ci_jobs_worker_spec.rb
index b96d506771d..24d3b6fadf5 100644
--- a/spec/workers/stuck_ci_jobs_worker_spec.rb
+++ b/spec/workers/stuck_ci_jobs_worker_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe StuckCiJobsWorker do
let(:updated_at) { 2.days.ago }
before do
- job.project.update(pending_delete: true)
+ job.project.update!(pending_delete: true)
end
it 'does drop job' do
diff --git a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
index e6f4f415987..5ed600e308b 100644
--- a/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
+++ b/spec/workers/update_head_pipeline_for_merge_request_worker_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe UpdateHeadPipelineForMergeRequestWorker do
context 'when merge request sha does not equal pipeline sha' do
before do
- merge_request.merge_request_diff.update(head_commit_sha: Digest::SHA1.hexdigest(SecureRandom.hex))
+ merge_request.merge_request_diff.update!(head_commit_sha: Digest::SHA1.hexdigest(SecureRandom.hex))
end
it 'does not update head pipeline' do
diff --git a/spec/workers/update_highest_role_worker_spec.rb b/spec/workers/update_highest_role_worker_spec.rb
index 19512fb0cfc..0c8ee53da9a 100644
--- a/spec/workers/update_highest_role_worker_spec.rb
+++ b/spec/workers/update_highest_role_worker_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe UpdateHighestRoleWorker, :clean_gitlab_redis_shared_state do
user_type: nil
}
end
+
let(:user) { create(:user, active_attributes) }
subject { worker.perform(user.id) }